
Searched refs:d (Results 1 – 25 of 639) sorted by relevance


/linux-4.19.296/lib/raid6/
avx2.c
46 int d, z, z0; in raid6_avx21_gen_syndrome() local
57 for (d = 0; d < bytes; d += 32) { in raid6_avx21_gen_syndrome()
58 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_avx21_gen_syndrome()
59 asm volatile("vmovdqa %0,%%ymm2" : : "m" (dptr[z0][d]));/* P[0] */ in raid6_avx21_gen_syndrome()
60 asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d])); in raid6_avx21_gen_syndrome()
62 asm volatile("vmovdqa %0,%%ymm6" : : "m" (dptr[z0-1][d])); in raid6_avx21_gen_syndrome()
64 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_avx21_gen_syndrome()
71 asm volatile("vmovdqa %0,%%ymm6" : : "m" (dptr[z][d])); in raid6_avx21_gen_syndrome()
80 asm volatile("vmovntdq %%ymm2,%0" : "=m" (p[d])); in raid6_avx21_gen_syndrome()
82 asm volatile("vmovntdq %%ymm4,%0" : "=m" (q[d])); in raid6_avx21_gen_syndrome()
[all …]
sse2.c
45 int d, z, z0; in raid6_sse21_gen_syndrome() local
56 for ( d = 0 ; d < bytes ; d += 16 ) { in raid6_sse21_gen_syndrome()
57 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_sse21_gen_syndrome()
58 asm volatile("movdqa %0,%%xmm2" : : "m" (dptr[z0][d])); /* P[0] */ in raid6_sse21_gen_syndrome()
59 asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d])); in raid6_sse21_gen_syndrome()
61 asm volatile("movdqa %0,%%xmm6" : : "m" (dptr[z0-1][d])); in raid6_sse21_gen_syndrome()
63 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_sse21_gen_syndrome()
71 asm volatile("movdqa %0,%%xmm6" : : "m" (dptr[z][d])); in raid6_sse21_gen_syndrome()
81 asm volatile("movntdq %%xmm2,%0" : "=m" (p[d])); in raid6_sse21_gen_syndrome()
83 asm volatile("movntdq %%xmm4,%0" : "=m" (q[d])); in raid6_sse21_gen_syndrome()
[all …]
avx512.c
53 int d, z, z0; in raid6_avx5121_gen_syndrome() local
66 for (d = 0; d < bytes; d += 64) { in raid6_avx5121_gen_syndrome()
73 : "m" (dptr[z0][d]), "m" (dptr[z0-1][d])); in raid6_avx5121_gen_syndrome()
85 : "m" (dptr[z][d])); in raid6_avx5121_gen_syndrome()
99 : "m" (p[d]), "m" (q[d])); in raid6_avx5121_gen_syndrome()
111 int d, z, z0; in raid6_avx5121_xor_syndrome() local
122 for (d = 0 ; d < bytes ; d += 64) { in raid6_avx5121_xor_syndrome()
127 : "m" (dptr[z0][d]), "m" (p[d])); in raid6_avx5121_xor_syndrome()
140 : "m" (dptr[z][d])); in raid6_avx5121_xor_syndrome()
158 : "m" (q[d]), "m" (p[d])); in raid6_avx5121_xor_syndrome()
[all …]
sse1.c
49 int d, z, z0; in raid6_sse11_gen_syndrome() local
60 for ( d = 0 ; d < bytes ; d += 8 ) { in raid6_sse11_gen_syndrome()
61 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_sse11_gen_syndrome()
62 asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */ in raid6_sse11_gen_syndrome()
63 asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d])); in raid6_sse11_gen_syndrome()
65 asm volatile("movq %0,%%mm6" : : "m" (dptr[z0-1][d])); in raid6_sse11_gen_syndrome()
67 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_sse11_gen_syndrome()
75 asm volatile("movq %0,%%mm6" : : "m" (dptr[z][d])); in raid6_sse11_gen_syndrome()
85 asm volatile("movntq %%mm2,%0" : "=m" (p[d])); in raid6_sse11_gen_syndrome()
86 asm volatile("movntq %%mm4,%0" : "=m" (q[d])); in raid6_sse11_gen_syndrome()
[all …]
mmx.c
44 int d, z, z0; in raid6_mmx1_gen_syndrome() local
55 for ( d = 0 ; d < bytes ; d += 8 ) { in raid6_mmx1_gen_syndrome()
56 asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */ in raid6_mmx1_gen_syndrome()
59 asm volatile("movq %0,%%mm6" : : "m" (dptr[z][d])); in raid6_mmx1_gen_syndrome()
68 asm volatile("movq %%mm2,%0" : "=m" (p[d])); in raid6_mmx1_gen_syndrome()
70 asm volatile("movq %%mm4,%0" : "=m" (q[d])); in raid6_mmx1_gen_syndrome()
92 int d, z, z0; in raid6_mmx2_gen_syndrome() local
104 for ( d = 0 ; d < bytes ; d += 16 ) { in raid6_mmx2_gen_syndrome()
105 asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */ in raid6_mmx2_gen_syndrome()
106 asm volatile("movq %0,%%mm3" : : "m" (dptr[z0][d+8])); in raid6_mmx2_gen_syndrome()
[all …]
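
The five gen_syndrome() variants above (AVX2, SSE2, AVX-512, SSE1, MMX) compute the same RAID-6 P and Q blocks and differ only in vector width. A minimal byte-at-a-time sketch of that computation, assuming the standard RAID-6 GF(2^8) arithmetic (multiply-by-2 with reduction constant 0x1d); this is illustrative only, not the kernel's code, and the names are made up:

#include <stddef.h>
#include <stdint.h>

/* Multiply by x (0x02) in GF(2^8), reduction polynomial x^8+x^4+x^3+x^2+1. */
static uint8_t gf_mul2(uint8_t v)
{
        return (uint8_t)((v << 1) ^ ((v & 0x80) ? 0x1d : 0x00));
}

/* P = XOR of all data blocks; Q = sum of g^z * D_z over GF(2^8), accumulated
 * from the highest data disk (z0) down to disk 0, exactly what the per-d
 * loops above do 8/16/32/64 bytes at a time with SIMD registers. */
static void gen_syndrome_ref(int disks, size_t bytes, uint8_t **dptr)
{
        uint8_t *p = dptr[disks - 2];   /* P parity block */
        uint8_t *q = dptr[disks - 1];   /* Q syndrome block */
        int z0 = disks - 3;             /* highest-numbered data disk */

        for (size_t d = 0; d < bytes; d++) {
                uint8_t wp = dptr[z0][d];
                uint8_t wq = dptr[z0][d];

                for (int z = z0 - 1; z >= 0; z--) {
                        wp ^= dptr[z][d];
                        wq = gf_mul2(wq) ^ dptr[z][d];
                }
                p[d] = wp;
                q[d] = wq;
        }
}
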
/linux-4.19.296/drivers/base/regmap/
regmap-irq.c
58 struct regmap_irq_chip_data *d = irq_data_get_irq_chip_data(data); in regmap_irq_lock() local
60 mutex_lock(&d->lock); in regmap_irq_lock()
63 static int regmap_irq_update_bits(struct regmap_irq_chip_data *d, in regmap_irq_update_bits() argument
67 if (d->chip->mask_writeonly) in regmap_irq_update_bits()
68 return regmap_write_bits(d->map, reg, mask, val); in regmap_irq_update_bits()
70 return regmap_update_bits(d->map, reg, mask, val); in regmap_irq_update_bits()
75 struct regmap_irq_chip_data *d = irq_data_get_irq_chip_data(data); in regmap_irq_sync_unlock() local
76 struct regmap *map = d->map; in regmap_irq_sync_unlock()
81 if (d->chip->runtime_pm) { in regmap_irq_sync_unlock()
93 for (i = 0; i < d->chip->num_regs; i++) { in regmap_irq_sync_unlock()
[all …]
/linux-4.19.296/drivers/media/usb/dvb-usb-v2/
dvb_usb_core.c
35 static int dvb_usbv2_download_firmware(struct dvb_usb_device *d, in dvb_usbv2_download_firmware() argument
40 dev_dbg(&d->udev->dev, "%s:\n", __func__); in dvb_usbv2_download_firmware()
42 if (!d->props->download_firmware) { in dvb_usbv2_download_firmware()
47 ret = request_firmware(&fw, name, &d->udev->dev); in dvb_usbv2_download_firmware()
49 dev_err(&d->udev->dev, in dvb_usbv2_download_firmware()
55 dev_info(&d->udev->dev, "%s: downloading firmware from file '%s'\n", in dvb_usbv2_download_firmware()
58 ret = d->props->download_firmware(d, fw); in dvb_usbv2_download_firmware()
65 dev_dbg(&d->udev->dev, "%s: failed=%d\n", __func__, ret); in dvb_usbv2_download_firmware()
69 static int dvb_usbv2_i2c_init(struct dvb_usb_device *d) in dvb_usbv2_i2c_init() argument
72 dev_dbg(&d->udev->dev, "%s:\n", __func__); in dvb_usbv2_i2c_init()
[all …]
anysee.c
45 static int anysee_ctrl_msg(struct dvb_usb_device *d, in anysee_ctrl_msg() argument
48 struct anysee_state *state = d_to_priv(d); in anysee_ctrl_msg()
51 mutex_lock(&d->usb_mutex); in anysee_ctrl_msg()
56 dev_dbg(&d->udev->dev, "%s: >>> %*ph\n", __func__, slen, state->buf); in anysee_ctrl_msg()
60 ret = dvb_usbv2_generic_rw_locked(d, state->buf, sizeof(state->buf), in anysee_ctrl_msg()
79 ret = usb_bulk_msg(d->udev, usb_rcvbulkpipe(d->udev, in anysee_ctrl_msg()
80 d->props->generic_bulk_ctrl_endpoint), in anysee_ctrl_msg()
83 dev_dbg(&d->udev->dev, in anysee_ctrl_msg()
87 dev_dbg(&d->udev->dev, "%s: <<< %*ph\n", __func__, in anysee_ctrl_msg()
91 dev_dbg(&d->udev->dev, in anysee_ctrl_msg()
[all …]
dvb_usb_urb.c
24 static int dvb_usb_v2_generic_io(struct dvb_usb_device *d, in dvb_usb_v2_generic_io() argument
29 if (!wbuf || !wlen || !d->props->generic_bulk_ctrl_endpoint || in dvb_usb_v2_generic_io()
30 !d->props->generic_bulk_ctrl_endpoint_response) { in dvb_usb_v2_generic_io()
31 dev_dbg(&d->udev->dev, "%s: failed=%d\n", __func__, -EINVAL); in dvb_usb_v2_generic_io()
35 dev_dbg(&d->udev->dev, "%s: >>> %*ph\n", __func__, wlen, wbuf); in dvb_usb_v2_generic_io()
37 ret = usb_bulk_msg(d->udev, usb_sndbulkpipe(d->udev, in dvb_usb_v2_generic_io()
38 d->props->generic_bulk_ctrl_endpoint), wbuf, wlen, in dvb_usb_v2_generic_io()
41 dev_err(&d->udev->dev, "%s: usb_bulk_msg() failed=%d\n", in dvb_usb_v2_generic_io()
48 if (d->props->generic_bulk_ctrl_delay) in dvb_usb_v2_generic_io()
49 usleep_range(d->props->generic_bulk_ctrl_delay, in dvb_usb_v2_generic_io()
[all …]
rtl28xxu.c
30 static int rtl28xxu_ctrl_msg(struct dvb_usb_device *d, struct rtl28xxu_req *req) in rtl28xxu_ctrl_msg() argument
32 struct rtl28xxu_dev *dev = d->priv; in rtl28xxu_ctrl_msg()
37 mutex_lock(&d->usb_mutex); in rtl28xxu_ctrl_msg()
40 dev_err(&d->intf->dev, "too large message %u\n", req->size); in rtl28xxu_ctrl_msg()
49 pipe = usb_sndctrlpipe(d->udev, 0); in rtl28xxu_ctrl_msg()
60 pipe = usb_rcvctrlpipe(d->udev, 0); in rtl28xxu_ctrl_msg()
62 pipe = usb_sndctrlpipe(d->udev, 0); in rtl28xxu_ctrl_msg()
65 ret = usb_control_msg(d->udev, pipe, 0, requesttype, req->value, in rtl28xxu_ctrl_msg()
67 dvb_usb_dbg_usb_control_msg(d->udev, 0, requesttype, req->value, in rtl28xxu_ctrl_msg()
76 mutex_unlock(&d->usb_mutex); in rtl28xxu_ctrl_msg()
[all …]
gl861.c
20 static int gl861_i2c_msg(struct dvb_usb_device *d, u8 addr, in gl861_i2c_msg() argument
51 dev_err(&d->udev->dev, "%s: wlen=%d, aborting\n", in gl861_i2c_msg()
59 ret = usb_control_msg(d->udev, usb_rcvctrlpipe(d->udev, 0), req, type, in gl861_i2c_msg()
73 struct dvb_usb_device *d = i2c_get_adapdata(adap); in gl861_i2c_xfer() local
79 if (mutex_lock_interruptible(&d->i2c_mutex) < 0) in gl861_i2c_xfer()
85 if (gl861_i2c_msg(d, msg[i].addr, msg[i].buf, in gl861_i2c_xfer()
90 if (gl861_i2c_msg(d, msg[i].addr, msg[i].buf, in gl861_i2c_xfer()
95 mutex_unlock(&d->i2c_mutex); in gl861_i2c_xfer()
138 static int gl861_init(struct dvb_usb_device *d) in gl861_init() argument
146 return usb_set_interface(d->udev, 0, 0); in gl861_init()
[all …]
af9035.c
45 static int af9035_ctrl_msg(struct dvb_usb_device *d, struct usb_req *req) in af9035_ctrl_msg() argument
51 struct state *state = d_to_priv(d); in af9035_ctrl_msg()
52 struct usb_interface *intf = d->intf; in af9035_ctrl_msg()
56 mutex_lock(&d->usb_mutex); in af9035_ctrl_msg()
85 ret = dvb_usbv2_generic_rw_locked(d, in af9035_ctrl_msg()
122 mutex_unlock(&d->usb_mutex); in af9035_ctrl_msg()
129 static int af9035_wr_regs(struct dvb_usb_device *d, u32 reg, u8 *val, int len) in af9035_wr_regs() argument
131 struct usb_interface *intf = d->intf; in af9035_wr_regs()
149 return af9035_ctrl_msg(d, &req); in af9035_wr_regs()
153 static int af9035_rd_regs(struct dvb_usb_device *d, u32 reg, u8 *val, int len) in af9035_rd_regs() argument
[all …]
/linux-4.19.296/drivers/clk/ti/
adpll.c
181 static const char *ti_adpll_clk_get_name(struct ti_adpll_data *d, in ti_adpll_clk_get_name() argument
189 err = of_property_read_string_index(d->np, in ti_adpll_clk_get_name()
196 name = devm_kasprintf(d->dev, GFP_KERNEL, "%08lx.adpll.%s", in ti_adpll_clk_get_name()
197 d->pa, postfix); in ti_adpll_clk_get_name()
205 static int ti_adpll_setup_clock(struct ti_adpll_data *d, struct clk *clock, in ti_adpll_setup_clock() argument
213 d->clocks[index].clk = clock; in ti_adpll_setup_clock()
214 d->clocks[index].unregister = unregister; in ti_adpll_setup_clock()
220 dev_warn(d->dev, "clock %s con_id lookup may fail\n", in ti_adpll_setup_clock()
222 snprintf(con_id, 16, "pll%03lx%s", d->pa & 0xfff, postfix + 1); in ti_adpll_setup_clock()
226 d->clocks[index].cl = cl; in ti_adpll_setup_clock()
[all …]
/linux-4.19.296/drivers/sh/intc/
core.c
74 struct intc_desc_int *d, in intc_register_irq() argument
84 radix_tree_insert(&d->tree, enum_id, intc_irq_xlate_get(irq)); in intc_register_irq()
95 data[0] = intc_get_mask_handle(desc, d, enum_id, 0); in intc_register_irq()
96 data[1] = intc_get_prio_handle(desc, d, enum_id, 0); in intc_register_irq()
106 data[0] = data[0] ? data[0] : intc_get_mask_handle(desc, d, enum_id, 1); in intc_register_irq()
107 data[1] = data[1] ? data[1] : intc_get_prio_handle(desc, d, enum_id, 1); in intc_register_irq()
117 irq_set_chip_and_handler_name(irq, &d->chip, handle_level_irq, in intc_register_irq()
132 hp = d->prio + d->nr_prio; in intc_register_irq()
144 d->nr_prio++; in intc_register_irq()
148 data[0] = intc_get_sense_handle(desc, d, enum_id); in intc_register_irq()
[all …]
virq.c
35 void intc_irq_xlate_set(unsigned int irq, intc_enum id, struct intc_desc_int *d) in intc_irq_xlate_set() argument
41 intc_irq_xlate[irq].desc = d; in intc_irq_xlate_set()
53 struct intc_desc_int *d; in intc_irq_lookup() local
56 list_for_each_entry(d, &intc_list, list) { in intc_irq_lookup()
59 if (strcmp(d->chip.name, chipname) != 0) in intc_irq_lookup()
68 tagged = radix_tree_tag_get(&d->tree, enum_id, in intc_irq_lookup()
73 ptr = radix_tree_lookup(&d->tree, enum_id); in intc_irq_lookup()
116 struct intc_desc_int *d = get_intc_desc(irq); in intc_virq_handler() local
126 addr = INTC_REG(d, _INTC_ADDR_E(handle), 0); in intc_virq_handler()
136 struct intc_desc_int *d, in intc_subgroup_data() argument
[all …]
/linux-4.19.296/crypto/
des_generic.c
590 #define ROUND(L, R, A, B, K, d) \ argument
591 B = K[0]; A = K[1]; K += d; \
617 #define DES_PC2(a, b, c, d) (T4(d) | T3(c) | T2(b) | T1(a)) argument
634 unsigned long a, b, c, d, w; in des_ekey() local
637 d = k[4]; d &= 0x0e; d <<= 4; d |= k[0] & 0x1e; d = pc1[d]; in des_ekey()
642 pe[15 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d]; in des_ekey()
643 pe[14 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b]; in des_ekey()
644 pe[13 * 2 + 0] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d]; in des_ekey()
645 pe[12 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b]; in des_ekey()
646 pe[11 * 2 + 0] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d]; in des_ekey()
[all …]
md5.c
45 u32 a, b, c, d; in md5_transform() local
50 d = hash[3]; in md5_transform()
52 MD5STEP(F1, a, b, c, d, in[0] + 0xd76aa478, 7); in md5_transform()
53 MD5STEP(F1, d, a, b, c, in[1] + 0xe8c7b756, 12); in md5_transform()
54 MD5STEP(F1, c, d, a, b, in[2] + 0x242070db, 17); in md5_transform()
55 MD5STEP(F1, b, c, d, a, in[3] + 0xc1bdceee, 22); in md5_transform()
56 MD5STEP(F1, a, b, c, d, in[4] + 0xf57c0faf, 7); in md5_transform()
57 MD5STEP(F1, d, a, b, c, in[5] + 0x4787c62a, 12); in md5_transform()
58 MD5STEP(F1, c, d, a, b, in[6] + 0xa8304613, 17); in md5_transform()
59 MD5STEP(F1, b, c, d, a, in[7] + 0xfd469501, 22); in md5_transform()
[all …]
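
Each MD5STEP(F1, ...) call above performs one round-1 step: add the round function of the other three state words plus a message word (already combined with its additive constant at the call site, e.g. in[0] + 0xd76aa478), rotate left, and add the previous word. A hedged sketch of that step, not the kernel's macro:

#include <stdint.h>

static inline uint32_t rol32(uint32_t v, int s)
{
        return (v << s) | (v >> (32 - s));
}

/* Round-1 function: selects y or z depending on x, i.e. (x & y) | (~x & z). */
static inline uint32_t md5_f1(uint32_t x, uint32_t y, uint32_t z)
{
        return z ^ (x & (y ^ z));
}

/* One round-1 step; 'in' is the message word plus its additive constant. */
static inline uint32_t md5_step1(uint32_t a, uint32_t b, uint32_t c,
                                 uint32_t d, uint32_t in, int s)
{
        return rol32(a + md5_f1(b, c, d) + in, s) + b;
}
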
md4.c
63 #define ROUND1(a,b,c,d,k,s) (a = lshift(a + F(b,c,d) + k, s)) argument
64 #define ROUND2(a,b,c,d,k,s) (a = lshift(a + G(b,c,d) + k + (u32)0x5A827999,s)) argument
65 #define ROUND3(a,b,c,d,k,s) (a = lshift(a + H(b,c,d) + k + (u32)0x6ED9EBA1,s)) argument
69 u32 a, b, c, d; in md4_transform() local
74 d = hash[3]; in md4_transform()
76 ROUND1(a, b, c, d, in[0], 3); in md4_transform()
77 ROUND1(d, a, b, c, in[1], 7); in md4_transform()
78 ROUND1(c, d, a, b, in[2], 11); in md4_transform()
79 ROUND1(b, c, d, a, in[3], 19); in md4_transform()
80 ROUND1(a, b, c, d, in[4], 3); in md4_transform()
[all …]
/linux-4.19.296/fs/nfs/
pnfs_dev.c
81 struct nfs4_deviceid_node *d; in _lookup_deviceid() local
83 hlist_for_each_entry_rcu(d, &nfs4_deviceid_cache[hash], node) in _lookup_deviceid()
84 if (d->ld == ld && d->nfs_client == clp && in _lookup_deviceid()
85 !memcmp(&d->deviceid, id, sizeof(*id))) { in _lookup_deviceid()
86 if (atomic_read(&d->ref)) in _lookup_deviceid()
87 return d; in _lookup_deviceid()
99 struct nfs4_deviceid_node *d = NULL; in nfs4_get_device_info() local
149 d = server->pnfs_curr_ld->alloc_deviceid_node(server, pdev, in nfs4_get_device_info()
151 if (d && pdev->nocache) in nfs4_get_device_info()
152 set_bit(NFS_DEVICEID_NOCACHE, &d->flags); in nfs4_get_device_info()
[all …]
/linux-4.19.296/include/linux/
irq.h
241 #define __irqd_to_state(d) ACCESS_PRIVATE((d)->common, state_use_accessors) argument
243 static inline bool irqd_is_setaffinity_pending(struct irq_data *d) in irqd_is_setaffinity_pending() argument
245 return __irqd_to_state(d) & IRQD_SETAFFINITY_PENDING; in irqd_is_setaffinity_pending()
248 static inline bool irqd_is_per_cpu(struct irq_data *d) in irqd_is_per_cpu() argument
250 return __irqd_to_state(d) & IRQD_PER_CPU; in irqd_is_per_cpu()
253 static inline bool irqd_can_balance(struct irq_data *d) in irqd_can_balance() argument
255 return !(__irqd_to_state(d) & (IRQD_PER_CPU | IRQD_NO_BALANCING)); in irqd_can_balance()
258 static inline bool irqd_affinity_was_set(struct irq_data *d) in irqd_affinity_was_set() argument
260 return __irqd_to_state(d) & IRQD_AFFINITY_SET; in irqd_affinity_was_set()
263 static inline void irqd_mark_affinity_was_set(struct irq_data *d) in irqd_mark_affinity_was_set() argument
[all …]
overflow.h
56 #define check_add_overflow(a, b, d) ({ \ argument
59 typeof(d) __d = (d); \
65 #define check_sub_overflow(a, b, d) ({ \ argument
68 typeof(d) __d = (d); \
74 #define check_mul_overflow(a, b, d) ({ \ argument
77 typeof(d) __d = (d); \
87 #define __unsigned_add_overflow(a, b, d) ({ \ argument
90 typeof(d) __d = (d); \
96 #define __unsigned_sub_overflow(a, b, d) ({ \ argument
99 typeof(d) __d = (d); \
[all …]
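
The check_*_overflow() macros above take the two operands and a pointer to the result: they store the (possibly wrapped) value through that pointer and evaluate to true only if the operation overflowed. A small userspace sketch of the calling convention, assuming a compiler that provides __builtin_add_overflow (which the kernel macro wraps when the builtin is available):

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Userspace stand-in for the kernel macro; same calling convention. */
#define check_add_overflow(a, b, d)     __builtin_add_overflow(a, b, d)

int main(void)
{
        size_t n = (size_t)-1, extra = 16, total;

        if (check_add_overflow(n, extra, &total))
                printf("overflow detected\n");
        else
                printf("total = %zu\n", total);
        return 0;
}
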
/linux-4.19.296/drivers/iio/
industrialio-sw-device.c
35 struct iio_sw_device_type *d = NULL, *iter; in __iio_find_sw_device_type() local
39 d = iter; in __iio_find_sw_device_type()
43 return d; in __iio_find_sw_device_type()
46 int iio_register_sw_device_type(struct iio_sw_device_type *d) in iio_register_sw_device_type() argument
52 iter = __iio_find_sw_device_type(d->name, strlen(d->name)); in iio_register_sw_device_type()
56 list_add_tail(&d->list, &iio_device_types_list); in iio_register_sw_device_type()
62 d->group = configfs_register_default_group(iio_devices_group, d->name, in iio_register_sw_device_type()
64 if (IS_ERR(d->group)) in iio_register_sw_device_type()
65 ret = PTR_ERR(d->group); in iio_register_sw_device_type()
101 struct iio_sw_device *d; in iio_sw_device_create() local
[all …]
/linux-4.19.296/drivers/media/tuners/
tda18271-maps.c
26 u8 d; /* div */ member
37 { .lomax = 32000, .pd = 0x5f, .d = 0xf0 },
38 { .lomax = 35000, .pd = 0x5e, .d = 0xe0 },
39 { .lomax = 37000, .pd = 0x5d, .d = 0xd0 },
40 { .lomax = 41000, .pd = 0x5c, .d = 0xc0 },
41 { .lomax = 44000, .pd = 0x5b, .d = 0xb0 },
42 { .lomax = 49000, .pd = 0x5a, .d = 0xa0 },
43 { .lomax = 54000, .pd = 0x59, .d = 0x90 },
44 { .lomax = 61000, .pd = 0x58, .d = 0x80 },
45 { .lomax = 65000, .pd = 0x4f, .d = 0x78 },
[all …]
/linux-4.19.296/drivers/irqchip/
irq-xtensa-mx.c
25 static int xtensa_mx_irq_map(struct irq_domain *d, unsigned int irq, in xtensa_mx_irq_map() argument
29 struct irq_chip *irq_chip = d->host_data; in xtensa_mx_irq_map()
36 return xtensa_irq_map(d, irq, hw); in xtensa_mx_irq_map()
45 static int xtensa_mx_irq_domain_xlate(struct irq_domain *d, in xtensa_mx_irq_domain_xlate() argument
69 static void xtensa_mx_irq_mask(struct irq_data *d) in xtensa_mx_irq_mask() argument
71 unsigned int mask = 1u << d->hwirq; in xtensa_mx_irq_mask()
75 set_er(1u << (xtensa_get_ext_irq_no(d->hwirq) - in xtensa_mx_irq_mask()
84 static void xtensa_mx_irq_unmask(struct irq_data *d) in xtensa_mx_irq_unmask() argument
86 unsigned int mask = 1u << d->hwirq; in xtensa_mx_irq_unmask()
90 set_er(1u << (xtensa_get_ext_irq_no(d->hwirq) - in xtensa_mx_irq_unmask()
[all …]
/linux-4.19.296/lib/
sha256.c
50 u32 a, b, c, d, e, f, g, h, t1, t2; in sha256_transform() local
63 a = state[0]; b = state[1]; c = state[2]; d = state[3]; in sha256_transform()
68 t2 = e0(a) + Maj(a, b, c); d += t1; h = t1 + t2; in sha256_transform()
69 t1 = g + e1(d) + Ch(d, e, f) + 0x71374491 + W[1]; in sha256_transform()
71 t1 = f + e1(c) + Ch(c, d, e) + 0xb5c0fbcf + W[2]; in sha256_transform()
73 t1 = e + e1(b) + Ch(b, c, d) + 0xe9b5dba5 + W[3]; in sha256_transform()
75 t1 = d + e1(a) + Ch(a, b, c) + 0x3956c25b + W[4]; in sha256_transform()
76 t2 = e0(e) + Maj(e, f, g); h += t1; d = t1 + t2; in sha256_transform()
78 t2 = e0(d) + Maj(d, e, f); g += t1; c = t1 + t2; in sha256_transform()
80 t2 = e0(c) + Maj(c, d, e); f += t1; b = t1 + t2; in sha256_transform()
[all …]
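
In the transform above, e0/e1 are the FIPS 180-4 Σ0/Σ1 rotations and Ch/Maj the choose/majority functions; the kernel file unrolls eight rounds at a time, so the register roles rotate instead of the variables being shifted. A rolled-up sketch of a single round (illustrative, not the kernel's code):

#include <stdint.h>

static inline uint32_t ror32(uint32_t v, int s)
{
        return (v >> s) | (v << (32 - s));
}

static inline uint32_t Ch(uint32_t x, uint32_t y, uint32_t z)  { return z ^ (x & (y ^ z)); }
static inline uint32_t Maj(uint32_t x, uint32_t y, uint32_t z) { return (x & y) | (z & (x | y)); }
static inline uint32_t e0(uint32_t x) { return ror32(x, 2) ^ ror32(x, 13) ^ ror32(x, 22); }
static inline uint32_t e1(uint32_t x) { return ror32(x, 6) ^ ror32(x, 11) ^ ror32(x, 25); }

/* s[] holds the working variables a..h; kw is K[i] + W[i] for this round. */
static void sha256_round(uint32_t s[8], uint32_t kw)
{
        uint32_t t1 = s[7] + e1(s[4]) + Ch(s[4], s[5], s[6]) + kw;
        uint32_t t2 = e0(s[0]) + Maj(s[0], s[1], s[2]);

        for (int i = 7; i > 0; i--)     /* h = g, g = f, ..., b = a */
                s[i] = s[i - 1];
        s[4] += t1;                     /* e = d + t1 (s[4] now holds the old d) */
        s[0] = t1 + t2;                 /* a = t1 + t2 */
}
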
