sec2              176 drivers/gpu/drm/nouveau/include/nvkm/core/device.h 	struct nvkm_sec2 *sec2;
sec2              249 drivers/gpu/drm/nouveau/include/nvkm/core/device.h 	int (*sec2    )(struct nvkm_device *, int idx, struct nvkm_sec2 **);
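
The two device.h hits are the halves of the engine hookup: a per-device instance pointer and a per-chipset constructor hook that fills it in. A minimal sketch of the relationship, with only the sec2 members taken from the lines above (the surrounding struct names and layout are assumptions, not copied from this tree):

	/* Sketch, not verbatim kernel code. */
	struct nvkm_device {
		/* ... */
		struct nvkm_sec2 *sec2;   /* constructed engine instance, or NULL */
	};

	struct nvkm_device_chip {         /* per-chipset function table (name assumed) */
		/* ... */
		int (*sec2)(struct nvkm_device *, int idx, struct nvkm_sec2 **);
	};
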
sec2             2225 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.sec2 = gp102_sec2_new,
sec2             2261 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.sec2 = gp102_sec2_new,
sec2             2297 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.sec2 = gp102_sec2_new,
sec2             2333 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.sec2 = gp102_sec2_new,
sec2             2369 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.sec2 = gp102_sec2_new,
sec2             2435 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.sec2 = gp102_sec2_new,
sec2             2470 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.sec2 = tu102_sec2_new,
sec2             2505 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.sec2 = tu102_sec2_new,
sec2             2540 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.sec2 = tu102_sec2_new,
sec2             2575 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.sec2 = tu102_sec2_new,
sec2             2610 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.sec2 = tu102_sec2_new,
sec2             2714 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	_(SEC2   , device->sec2    , &device->sec2->engine);
sec2             3202 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 		_(NVKM_ENGINE_SEC2    ,     sec2);
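
The device/base.c hits split into two groups: chipset tables that pick a generation-specific constructor (gp102_sec2_new on the Pascal/Volta entries, tu102_sec2_new on the Turing ones), and index-to-engine plumbing that maps NVKM_ENGINE_SEC2 back to &device->sec2->engine via the _() macros at lines 2714 and 3202. A hedged sketch of one chipset entry; the chipset name and the omitted fields are illustrative only:

	/* Sketch: device construction walks a table like this and calls
	 * .sec2() to populate device->sec2; lookups then go through the
	 * _(SEC2, device->sec2, &device->sec2->engine) macro seen above. */
	static const struct nvkm_device_chip
	nv162_chipset = {
		.name = "TU102",
		/* ... other engines and subdevs ... */
		.sec2 = tu102_sec2_new,
	};
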
sec2               31 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	struct nvkm_sec2 *sec2 = nvkm_sec2(engine);
sec2               32 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	nvkm_msgqueue_del(&sec2->queue);
sec2               33 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	nvkm_falcon_del(&sec2->falcon);
sec2               34 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	return sec2;
sec2               40 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	struct nvkm_sec2 *sec2 = nvkm_sec2(engine);
sec2               43 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	u32 disp = nvkm_rd32(device, sec2->addr + 0x01c);
sec2               44 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	u32 intr = nvkm_rd32(device, sec2->addr + 0x008) & disp & ~(disp >> 16);
sec2               47 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 		schedule_work(&sec2->work);
sec2               48 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 		nvkm_wr32(device, sec2->addr + 0x004, 0x00000040);
sec2               54 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 		nvkm_wr32(device, sec2->addr + 0x004, intr);
sec2               62 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	struct nvkm_sec2 *sec2 = container_of(work, typeof(*sec2), work);
sec2               64 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	if (!sec2->queue) {
sec2               65 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 		nvkm_warn(&sec2->engine.subdev,
sec2               70 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	nvkm_msgqueue_recv(sec2->queue);
sec2               77 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	struct nvkm_sec2 *sec2 = nvkm_sec2(engine);
sec2               78 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	struct nvkm_subdev *subdev = &sec2->engine.subdev;
sec2               80 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	if (!sec2->addr) {
sec2               81 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 		sec2->addr = nvkm_top_addr(subdev->device, subdev->index);
sec2               82 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 		if (WARN_ON(!sec2->addr))
sec2               86 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	return nvkm_falcon_v1_new(subdev, "SEC2", sec2->addr, &sec2->falcon);
sec2               92 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	struct nvkm_sec2 *sec2 = nvkm_sec2(engine);
sec2               93 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	flush_work(&sec2->work);
sec2              109 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	struct nvkm_sec2 *sec2;
sec2              111 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	if (!(sec2 = *psec2 = kzalloc(sizeof(*sec2), GFP_KERNEL)))
sec2              113 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	sec2->addr = addr;
sec2              114 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	INIT_WORK(&sec2->work, nvkm_sec2_recv);
sec2              116 drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c 	return nvkm_engine_ctor(&nvkm_sec2, device, index, true, &sec2->engine);
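
Read together, the sec2/base.c hits outline the whole engine life cycle: the constructor records the MMIO base and initialises a work item, oneinit resolves the base address from the device topology and creates a "SEC2" v1 falcon, the interrupt handler acks and defers message-queue traffic to that work item, fini flushes it, and the destructor deletes the msgqueue and falcon. A condensed sketch of the interrupt path as it can be reconstructed from the lines above (the register offsets and the 0x40 msgqueue bit are from the listing; the function framing is simplified):

	/* Sketch of the deferred-interrupt pattern: the handler only acks
	 * and schedules, the msgqueue is drained later in process context
	 * by the work item (nvkm_sec2_recv above). */
	static void
	sec2_intr_sketch(struct nvkm_sec2 *sec2, struct nvkm_device *device)
	{
		u32 disp = nvkm_rd32(device, sec2->addr + 0x01c);  /* enabled sources */
		u32 intr = nvkm_rd32(device, sec2->addr + 0x008) & disp & ~(disp >> 16);

		if (intr & 0x00000040) {                   /* msgqueue interrupt */
			schedule_work(&sec2->work);
			nvkm_wr32(device, sec2->addr + 0x004, 0x00000040);
			intr &= ~0x00000040;
		}

		if (intr)                                  /* ack anything unexpected */
			nvkm_wr32(device, sec2->addr + 0x004, intr);
	}
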
sec2             1024 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/acr_r352.c 		queue = sb->subdev.device->sec2->queue;
sec2              166 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/acr_r361.c 	const struct nvkm_sec2 *sec = acr->subdev->device->sec2;
sec2               90 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/acr_r370.c 	const struct nvkm_sec2 *sec = acr->subdev->device->sec2;
sec2              140 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/base.c 		nvkm_engine_ref(&subdev->device->sec2->engine);
sec2              141 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/base.c 		sb->boot_falcon = subdev->device->sec2->falcon;
sec2              143 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/ls_ucode_msgqueue.c 	struct nvkm_sec2 *sec = sb->subdev.device->sec2;
sec2              164 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/ls_ucode_msgqueue.c 	struct nvkm_sec2 *sec = device->sec2;
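
The secboot hits are the only users outside the engine itself: the ACR code submits commands through device->sec2->queue, and the secboot/ls_ucode code takes a reference on the engine and borrows its falcon as the boot falcon. A minimal sketch of that selection; the surrounding condition and variable name are assumptions, only the two dereferences come from the listing:

	/* Sketch: when SEC2 is the designated boot falcon, secboot pins the
	 * engine and reuses its falcon and command queue. */
	if (boot_falcon_id == NVKM_SECBOOT_FALCON_SEC2) {   /* framing assumed */
		nvkm_engine_ref(&subdev->device->sec2->engine);
		sb->boot_falcon = subdev->device->sec2->falcon;
	}
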
sec2             2195 drivers/md/dm-integrity.c 				sector_t sec2, area2, offset2;
sec2             2199 drivers/md/dm-integrity.c 				sec2 = journal_entry_get_sector(je2);
sec2             2200 drivers/md/dm-integrity.c 				get_area_and_offset(ic, sec2, &area2, &offset2);
sec2              277 drivers/rtc/rtc-sh.c 	unsigned int sec128, sec2, yr, yr100, cf_bit;
sec2              312 drivers/rtc/rtc-sh.c 		sec2 = readb(rtc->regbase + R64CNT);
sec2              316 drivers/rtc/rtc-sh.c 	} while (cf_bit != 0 || ((sec128 ^ sec2) & RTC_BIT_INVERTED) != 0);
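
The last two groups are unrelated to nouveau: in dm-integrity and rtc-sh, sec2 is just a local variable. In the rtc-sh case it is the second sample of the 64/128 Hz sub-second counter, used to detect a rollover while the time registers were being read; the do/while at line 316 gives the pattern away. A small hedged sketch of that retry-until-stable read (register and flag names other than R64CNT and RTC_BIT_INVERTED are assumed):

	/* Sketch of the retry loop implied by the listing: sample the
	 * sub-second counter before and after reading the time registers,
	 * and start over if a carry was flagged or the counter moved. */
	do {
		sec128 = readb(rtc->regbase + R64CNT);          /* first sample */
		/* ... read seconds/minutes/hours/date registers ... */
		cf_bit = readb(rtc->regbase + RCR1) & RCR1_CF;  /* carry flag (name assumed) */
		sec2   = readb(rtc->regbase + R64CNT);          /* second sample */
	} while (cf_bit != 0 || ((sec128 ^ sec2) & RTC_BIT_INVERTED) != 0);
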