reg_table 1569 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c struct atom_mc_reg_table *reg_table)
reg_table 1577 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c memset(reg_table, 0, sizeof(struct atom_mc_reg_table));
reg_table 1606 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c reg_table->mc_reg_address[i].s1 =
reg_table 1608 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c reg_table->mc_reg_address[i].pre_reg_data =
reg_table 1614 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c reg_table->last = i;
reg_table 1620 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c reg_table->mc_reg_table_entry[num_ranges].mclk_max =
reg_table 1623 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c for (i = 0, j = 1; i < reg_table->last; i++) {
reg_table 1624 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c if ((reg_table->mc_reg_address[i].pre_reg_data & LOW_NIBBLE_MASK) == DATA_FROM_TABLE) {
reg_table 1625 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c reg_table->mc_reg_table_entry[num_ranges].mc_data[i] =
reg_table 1628 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c } else if ((reg_table->mc_reg_address[i].pre_reg_data & LOW_NIBBLE_MASK) == DATA_EQU_PREV) {
reg_table 1629 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c reg_table->mc_reg_table_entry[num_ranges].mc_data[i] =
reg_table 1630 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c reg_table->mc_reg_table_entry[num_ranges].mc_data[i - 1];
reg_table 1640 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c reg_table->num_entries = num_ranges;
reg_table 193 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.h struct atom_mc_reg_table *reg_table);
reg_table 821 drivers/gpu/drm/i915/i915_cmd_parser.c const struct drm_i915_reg_descriptor *reg_table,
reg_table 829 drivers/gpu/drm/i915/i915_cmd_parser.c u32 curr = i915_mmio_reg_offset(reg_table[i].addr);
reg_table 343 drivers/gpu/drm/radeon/radeon.h struct atom_mc_reg_table *reg_table);
reg_table 3987 drivers/gpu/drm/radeon/radeon_atombios.c struct atom_mc_reg_table *reg_table)
reg_table 3995 drivers/gpu/drm/radeon/radeon_atombios.c memset(reg_table, 0, sizeof(struct atom_mc_reg_table));
reg_table 4024 drivers/gpu/drm/radeon/radeon_atombios.c reg_table->mc_reg_address[i].s1 =
reg_table 4026 drivers/gpu/drm/radeon/radeon_atombios.c reg_table->mc_reg_address[i].pre_reg_data =
reg_table 4032 drivers/gpu/drm/radeon/radeon_atombios.c reg_table->last = i;
reg_table 4038 drivers/gpu/drm/radeon/radeon_atombios.c reg_table->mc_reg_table_entry[num_ranges].mclk_max =
reg_table 4041 drivers/gpu/drm/radeon/radeon_atombios.c for (i = 0, j = 1; i < reg_table->last; i++) {
reg_table 4042 drivers/gpu/drm/radeon/radeon_atombios.c if ((reg_table->mc_reg_address[i].pre_reg_data & LOW_NIBBLE_MASK) == DATA_FROM_TABLE) {
reg_table 4043 drivers/gpu/drm/radeon/radeon_atombios.c reg_table->mc_reg_table_entry[num_ranges].mc_data[i] =
reg_table 4046 drivers/gpu/drm/radeon/radeon_atombios.c } else if ((reg_table->mc_reg_address[i].pre_reg_data & LOW_NIBBLE_MASK) == DATA_EQU_PREV) {
reg_table 4047 drivers/gpu/drm/radeon/radeon_atombios.c reg_table->mc_reg_table_entry[num_ranges].mc_data[i] =
reg_table 4048 drivers/gpu/drm/radeon/radeon_atombios.c reg_table->mc_reg_table_entry[num_ranges].mc_data[i - 1];
reg_table 4058 drivers/gpu/drm/radeon/radeon_atombios.c reg_table->num_entries = num_ranges;
reg_table 412 drivers/media/i2c/imx214.c const struct reg_8 *reg_table;
reg_table 417 drivers/media/i2c/imx214.c .reg_table = mode_4096x2304,
reg_table 422 drivers/media/i2c/imx214.c .reg_table = mode_1920x1080,
reg_table 734 drivers/media/i2c/imx214.c ret = imx214_write_table(imx214, mode->reg_table);
reg_table 291 drivers/net/wireless/mediatek/mt7601u/phy.c const struct reg_table *t;
reg_table 303 drivers/net/wireless/mediatek/mt7601u/phy.c const struct reg_table *t;
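The drivers/media/i2c/imx214.c hits above (lines 412, 417, 422 and 734) show a common per-mode register-table pattern: each mode definition carries a reg_table pointer to a list of register/value pairs, and applying a mode walks that list. The standalone C sketch below illustrates the pattern only; the sentinel value, the sensor_mode layout, the do_write_reg()/write_reg_table() helpers and the sample register contents are assumptions for illustration, not the driver's actual definitions.

/*
 * Minimal sketch of a per-mode register table, modeled on the imx214.c
 * hits above. Everything not shown in the listing is an assumption.
 */
#include <stdint.h>
#include <stdio.h>

#define REG_TABLE_END 0xffff            /* assumed end-of-table sentinel */

struct reg_8 {
	uint16_t addr;
	uint8_t val;
};

struct sensor_mode {
	uint32_t width;
	uint32_t height;
	const struct reg_8 *reg_table;  /* as in the field at imx214.c:412 */
};

/* Hypothetical register write; a real driver would issue an I2C transfer. */
static int do_write_reg(uint16_t addr, uint8_t val)
{
	printf("write 0x%04x = 0x%02x\n", addr, val);
	return 0;
}

/* Walk a mode's register table until the sentinel entry is reached. */
static int write_reg_table(const struct reg_8 *table)
{
	int ret;

	for (; table->addr != REG_TABLE_END; table++) {
		ret = do_write_reg(table->addr, table->val);
		if (ret)
			return ret;
	}
	return 0;
}

/* Illustrative stand-in for a mode table such as mode_1920x1080. */
static const struct reg_8 mode_1920x1080[] = {
	{ 0x0112, 0x0a },
	{ 0x0113, 0x0a },
	{ REG_TABLE_END, 0x00 },
};

static const struct sensor_mode modes[] = {
	{ 1920, 1080, mode_1920x1080 },
};

int main(void)
{
	/* Apply the selected mode through its reg_table, as imx214.c:734 does. */
	return write_reg_table(modes[0].reg_table);
}

Keeping the tables sentinel-terminated lets a single writer loop serve every mode, so adding a new mode only means adding a new table and pointing its reg_table member at it.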