dcc              2713 drivers/atm/he.c 	static long mcc = 0, oec = 0, dcc = 0, cec = 0;
dcc              2730 drivers/atm/he.c 	dcc += he_readl(he_dev, DCC);
dcc              2736 drivers/atm/he.c 							mcc, oec, dcc, cec);
dcc                63 drivers/bus/vexpress-config.c 		u32 *position, u32 *dcc)
dcc                71 drivers/bus/vexpress-config.c 	vexpress_config_find_prop(node, "arm,vexpress,dcc", dcc);
dcc              2705 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 			  struct dc_plane_dcc_param *dcc,
dcc              2750 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 	dcc->enable = 1;
dcc              2751 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 	dcc->meta_pitch =
dcc              2753 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 	dcc->independent_64b_blks = i64b;
dcc              2770 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 			     struct dc_plane_dcc_param *dcc,
dcc              2779 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 	memset(dcc, 0, sizeof(*dcc));
dcc              2882 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 						tiling_flags, dcc, address,
dcc              3055 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 					   &plane_info->dcc, address,
dcc              3114 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 	dc_plane_state->dcc = plane_info.dcc;
dcc              4563 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 			&plane_state->plane_size, &plane_state->dcc,
dcc              5794 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 				 bundle->plane_infos[planes_count].dcc.enable);
dcc               318 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 		input->src.dcc = pipe->plane_state->dcc.enable ? 1 : 0;
dcc               328 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 		input->src.dcc = pipe->plane_res.dpp->ctx->dc->res_pool->hubbub->funcs->
dcc               332 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 	input->src.meta_pitch          = pipe->plane_state->dcc.meta_pitch;
dcc               965 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 				v->dcc_enable[input_idx] = pipe->plane_state->dcc.enable ? dcn_bw_yes : dcn_bw_no;
dcc              1420 drivers/gpu/drm/amd/display/dc/core/dc.c 	if (u->plane_info->dcc.enable != u->surface->dcc.enable
dcc              1421 drivers/gpu/drm/amd/display/dc/core/dc.c 			|| u->plane_info->dcc.independent_64b_blks != u->surface->dcc.independent_64b_blks
dcc              1422 drivers/gpu/drm/amd/display/dc/core/dc.c 			|| u->plane_info->dcc.meta_pitch != u->surface->dcc.meta_pitch) {
dcc              1733 drivers/gpu/drm/amd/display/dc/core/dc.c 		surface->dcc =
dcc              1734 drivers/gpu/drm/amd/display/dc/core/dc.c 				srf_update->plane_info->dcc;
dcc               165 drivers/gpu/drm/amd/display/dc/core/dc_debug.c 				plane_state->dcc.enable,
dcc               726 drivers/gpu/drm/amd/display/dc/dc.h 	struct dc_plane_dcc_param dcc;
dcc               775 drivers/gpu/drm/amd/display/dc/dc.h 	struct dc_plane_dcc_param dcc;
dcc               510 drivers/gpu/drm/amd/display/dc/dce/dce_mem_input.c 	struct dc_plane_dcc_param *dcc,
dcc               642 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	struct dc_plane_dcc_param *dcc,
dcc               167 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c 	struct dc_plane_dcc_param *dcc)
dcc               180 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c 		meta_pitch = dcc->meta_pitch - 1;
dcc               182 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c 		meta_pitch_c = dcc->meta_pitch_c - 1;
dcc               185 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c 		meta_pitch = dcc->meta_pitch - 1;
dcc               190 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c 	if (!dcc->enable) {
dcc               531 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c 	struct dc_plane_dcc_param *dcc,
dcc               535 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c 	hubp1_dcc_control(hubp, dcc->enable, dcc->independent_64b_blks);
dcc               537 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c 	hubp1_program_size(hubp, format, plane_size, dcc);
dcc               666 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.h 	struct dc_plane_dcc_param *dcc,
dcc               687 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.h 	struct dc_plane_dcc_param *dcc);
dcc              2396 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 			&plane_state->dcc,
dcc              1179 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 				if (!plane->dcc.enable)
dcc               326 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c 	struct dc_plane_dcc_param *dcc)
dcc               342 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c 		meta_pitch = dcc->meta_pitch - 1;
dcc               344 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c 		meta_pitch_c = dcc->meta_pitch_c - 1;
dcc               347 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c 		meta_pitch = dcc->meta_pitch - 1;
dcc               352 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c 	if (!dcc->enable) {
dcc               522 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c 	struct dc_plane_dcc_param *dcc,
dcc               528 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c 	hubp2_dcc_control(hubp, dcc->enable, dcc->independent_64b_blks);
dcc               530 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c 	hubp2_program_size(hubp, format, plane_size, dcc);
dcc               296 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.h 	struct dc_plane_dcc_param *dcc);
dcc               313 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.h 	struct dc_plane_dcc_param *dcc,
dcc              1920 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		pipes[pipe_cnt].pipe.src.dcc = false;
dcc              2090 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				pipes[pipe_cnt].pipe.src.meta_pitch = pln->dcc.meta_pitch;
dcc              2091 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				pipes[pipe_cnt].pipe.src.meta_pitch_c = pln->dcc.meta_pitch_c;
dcc              2094 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				pipes[pipe_cnt].pipe.src.meta_pitch = pln->dcc.meta_pitch;
dcc              2096 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			pipes[pipe_cnt].pipe.src.dcc = pln->dcc.enable;
dcc               215 drivers/gpu/drm/amd/display/dc/dml/display_mode_structs.h 	unsigned char dcc;
dcc               419 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c 						ip->dcc_supported : src->dcc && ip->dcc_supported;
dcc              1182 drivers/gpu/drm/amd/display/dc/dml/dml1_display_rq_dlg_calc.c 	dcc_en = e2e_pipe_param.pipe.src.dcc;
dcc               114 drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h 		struct dc_plane_dcc_param *dcc,
dcc               158 drivers/gpu/drm/amd/display/dc/inc/hw/mem_input.h 		struct dc_plane_dcc_param *dcc,
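
The amdgpu display entries above all pass a struct dc_plane_dcc_param around to describe delta color compression (DCC) for a plane. Below is a minimal C sketch of that pattern; the struct is reduced to the four fields visible in this listing (enable, meta_pitch, meta_pitch_c, independent_64b_blks) and the fill helper is hypothetical, mirroring only the memset-then-populate sequence seen in the amdgpu_dm.c entries, not the real definition in dc.h.

#include <stdint.h>
#include <string.h>

/* Simplified model of dc_plane_dcc_param: only the fields that appear
 * in this listing are kept; the real struct in dc.h carries more. */
struct dcc_param_model {
	uint32_t enable;               /* DCC on/off for the plane       */
	uint32_t meta_pitch;           /* pitch of the luma metadata     */
	uint32_t meta_pitch_c;         /* pitch of the chroma metadata   */
	uint32_t independent_64b_blks; /* independent 64-byte block mode */
};

/* Hypothetical helper: clear the parameters first, then populate them
 * only when compression is actually in use, as amdgpu_dm.c does. */
static void model_fill_dcc(struct dcc_param_model *dcc,
			   uint32_t meta_pitch, int use_dcc, int i64b)
{
	memset(dcc, 0, sizeof(*dcc));
	if (!use_dcc)
		return;                /* dcc->enable stays 0 */

	dcc->enable = 1;
	dcc->meta_pitch = meta_pitch;
	dcc->independent_64b_blks = (uint32_t)i64b;
}
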
dcc               651 drivers/gpu/drm/i915/i915_gem_fence_reg.c 		u32 dcc = intel_uncore_read(uncore, DCC);
dcc               662 drivers/gpu/drm/i915/i915_gem_fence_reg.c 		switch (dcc & DCC_ADDRESSING_MODE_MASK) {
dcc               669 drivers/gpu/drm/i915/i915_gem_fence_reg.c 			if (dcc & DCC_CHANNEL_XOR_DISABLE) {
dcc               676 drivers/gpu/drm/i915/i915_gem_fence_reg.c 			} else if ((dcc & DCC_CHANNEL_XOR_BIT_17) == 0) {
dcc               695 drivers/gpu/drm/i915/i915_gem_fence_reg.c 		if (dcc == 0xffffffff) {
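
The i915 entries read the DCC (DRAM channel configuration) register and decode it by masking with DCC_ADDRESSING_MODE_MASK and testing DCC_CHANNEL_XOR_DISABLE / DCC_CHANNEL_XOR_BIT_17. A rough sketch of that read-mask-switch pattern follows; the bit values and the decision tree here are placeholders, not the real definitions from i915_reg.h or the exact swizzle logic in i915_gem_fence_reg.c.

#include <stdint.h>

/* Placeholder bit layout, for illustration only. */
#define MODEL_ADDR_MODE_MASK        0x3u
#define MODEL_ADDR_MODE_DUAL        0x2u
#define MODEL_CHANNEL_XOR_DISABLE   (1u << 10)
#define MODEL_CHANNEL_XOR_BIT_17    (1u << 9)

/* Decode a DCC-style register value into a swizzle description. */
static const char *model_decode_dcc(uint32_t dcc)
{
	if (dcc == 0xffffffff)	/* read-back failure, as checked above */
		return "unknown (register read failed)";

	switch (dcc & MODEL_ADDR_MODE_MASK) {
	case MODEL_ADDR_MODE_DUAL:
		if (dcc & MODEL_CHANNEL_XOR_DISABLE)
			return "no swizzling";
		if (!(dcc & MODEL_CHANNEL_XOR_BIT_17))
			return "bit 9/10/11 swizzling";
		return "bit 9/17 swizzling";
	default:
		return "single channel, no swizzling";
	}
}
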
dcc               148 drivers/misc/vexpress-syscfg.c 	u32 site, position, dcc;
dcc               152 drivers/misc/vexpress-syscfg.c 				&position, &dcc);
dcc               192 drivers/misc/vexpress-syscfg.c 				func, site, position, dcc,
dcc               195 drivers/misc/vexpress-syscfg.c 		func->template[i] = SYS_CFGCTRL_DCC(dcc);
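
The vexpress entries look up "arm,vexpress,dcc" (together with site and position) through vexpress_config_find_prop() and then fold those values into a SYS_CFGCTRL register template via SYS_CFGCTRL_DCC(). A small sketch of that bit-packing step, with made-up field positions; the real shifts and widths are defined in drivers/misc/vexpress-syscfg.c.

#include <stdint.h>

/* Hypothetical field encodings standing in for the SYS_CFGCTRL_* macros. */
#define MODEL_CFGCTRL_DCC(n)       (((n) & 0xfu)  << 26)
#define MODEL_CFGCTRL_FUNC(n)      (((n) & 0x3fu) << 20)
#define MODEL_CFGCTRL_SITE(n)      (((n) & 0x3u)  << 16)
#define MODEL_CFGCTRL_POSITION(n)  (((n) & 0xfu)  << 12)

/* Build one control-word template from the values the device-tree
 * lookup provides (site, position, dcc) plus the function number. */
static uint32_t model_cfgctrl_template(uint32_t func, uint32_t site,
				       uint32_t position, uint32_t dcc)
{
	return MODEL_CFGCTRL_DCC(dcc) |
	       MODEL_CFGCTRL_FUNC(func) |
	       MODEL_CFGCTRL_SITE(site) |
	       MODEL_CFGCTRL_POSITION(position);
}
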
dcc               560 fs/ext4/balloc.c 	struct percpu_counter *dcc = &sbi->s_dirtyclusters_counter;
dcc               563 fs/ext4/balloc.c 	dirty_clusters = percpu_counter_read_positive(dcc);
dcc               576 fs/ext4/balloc.c 		dirty_clusters = percpu_counter_sum_positive(dcc);
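
The ext4 entries show the standard percpu_counter pattern around s_dirtyclusters_counter: start with the cheap, approximate percpu_counter_read_positive(), and fall back to the exact but more expensive percpu_counter_sum_positive() only when the approximate value suggests the request might not fit. A simplified userspace sketch of that two-stage check, with a hypothetical counter type in place of struct percpu_counter:

#include <stdint.h>
#include <stdbool.h>

/* Stand-in for a per-CPU counter: a cheap, possibly stale estimate plus
 * an exact value that is costly to compute in the real kernel code. */
struct model_counter {
	int64_t approx;
	int64_t exact;
};

static int64_t model_read_positive(const struct model_counter *c)
{
	return c->approx > 0 ? c->approx : 0;
}

static int64_t model_sum_positive(const struct model_counter *c)
{
	return c->exact > 0 ? c->exact : 0;
}

/* Two-stage check in the style of the balloc.c code above: trust the
 * cheap read unless the margin looks tight, then pay for the exact sum. */
static bool model_has_free_clusters(const struct model_counter *dcc,
				    int64_t free_clusters, int64_t nclusters)
{
	int64_t dirty_clusters = model_read_positive(dcc);

	if (free_clusters - dirty_clusters < nclusters)
		dirty_clusters = model_sum_positive(dcc);

	return free_clusters - dirty_clusters >= nclusters;
}
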
dcc               938 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc               944 fs/f2fs/segment.c 	pend_list = &dcc->pend_list[plist_idx(len)];
dcc               960 fs/f2fs/segment.c 	atomic_inc(&dcc->discard_cmd_cnt);
dcc               961 fs/f2fs/segment.c 	dcc->undiscard_blks += len;
dcc               972 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc               978 fs/f2fs/segment.c 	rb_insert_color_cached(&dc->rb_node, &dcc->root, leftmost);
dcc               983 fs/f2fs/segment.c static void __detach_discard_cmd(struct discard_cmd_control *dcc,
dcc               987 fs/f2fs/segment.c 		atomic_sub(dc->queued, &dcc->queued_discard);
dcc               990 fs/f2fs/segment.c 	rb_erase_cached(&dc->rb_node, &dcc->root);
dcc               991 fs/f2fs/segment.c 	dcc->undiscard_blks -= dc->len;
dcc               995 fs/f2fs/segment.c 	atomic_dec(&dcc->discard_cmd_cnt);
dcc              1001 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              1022 fs/f2fs/segment.c 	__detach_discard_cmd(dcc, dc);
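
The segment.c entries just above (around lines 938-1022) show the bookkeeping that brackets every discard command: attaching a command inserts it into the cached rbtree and bumps discard_cmd_cnt and undiscard_blks, while __detach_discard_cmd() reverses exactly that accounting before the command is freed. A minimal sketch of the paired attach/detach accounting, using a sorted singly linked list as a stand-in for the kernel's rb_root_cached and plain counters in place of the atomics:

#include <stdlib.h>

/* Illustrative simplified types; the real code keys an rb_root_cached
 * by logical start address and uses atomic counters. */
struct model_cmd {
	unsigned long long lstart;	/* logical start block */
	unsigned long long len;		/* length in blocks    */
	struct model_cmd *next;
};

struct model_ctl {
	struct model_cmd *head;		/* sorted by lstart    */
	long cmd_cnt;			/* ~ discard_cmd_cnt   */
	unsigned long long undiscard_blks;
};

/* Attach: link the command in sorted order and account for it. */
static void model_attach(struct model_ctl *ctl, struct model_cmd *dc)
{
	struct model_cmd **pp = &ctl->head;

	while (*pp && (*pp)->lstart < dc->lstart)
		pp = &(*pp)->next;
	dc->next = *pp;
	*pp = dc;

	ctl->cmd_cnt++;
	ctl->undiscard_blks += dc->len;
}

/* Detach and free: unlink and undo exactly the accounting done on
 * attach, in the spirit of __detach_discard_cmd() plus the free that
 * follows it in the real code. */
static void model_detach(struct model_ctl *ctl, struct model_cmd *dc)
{
	struct model_cmd **pp = &ctl->head;

	while (*pp && *pp != dc)
		pp = &(*pp)->next;
	if (*pp)
		*pp = dc->next;

	ctl->cmd_cnt--;
	ctl->undiscard_blks -= dc->len;
	free(dc);
}
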
dcc              1122 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              1124 fs/f2fs/segment.c 					&(dcc->fstrim_list) : &(dcc->wait_list);
dcc              1193 fs/f2fs/segment.c 		atomic_inc(&dcc->queued_discard);
dcc              1205 fs/f2fs/segment.c 		atomic_inc(&dcc->issued_discard);
dcc              1226 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              1238 fs/f2fs/segment.c 	p = f2fs_lookup_rb_tree_for_insert(sbi, &dcc->root, &parent,
dcc              1249 fs/f2fs/segment.c static void __relocate_discard_cmd(struct discard_cmd_control *dcc,
dcc              1252 fs/f2fs/segment.c 	list_move_tail(&dc->list, &dcc->pend_list[plist_idx(dc->len)]);
dcc              1258 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              1267 fs/f2fs/segment.c 	dcc->undiscard_blks -= di.len;
dcc              1271 fs/f2fs/segment.c 		dcc->undiscard_blks += dc->len;
dcc              1272 fs/f2fs/segment.c 		__relocate_discard_cmd(dcc, dc);
dcc              1286 fs/f2fs/segment.c 			dcc->undiscard_blks += dc->len;
dcc              1287 fs/f2fs/segment.c 			__relocate_discard_cmd(dcc, dc);
dcc              1296 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              1306 fs/f2fs/segment.c 	dc = (struct discard_cmd *)f2fs_lookup_rb_tree_ret(&dcc->root,
dcc              1348 fs/f2fs/segment.c 			dcc->undiscard_blks += di.len;
dcc              1349 fs/f2fs/segment.c 			__relocate_discard_cmd(dcc, prev_dc);
dcc              1362 fs/f2fs/segment.c 			dcc->undiscard_blks += di.len;
dcc              1363 fs/f2fs/segment.c 			__relocate_discard_cmd(dcc, next_dc);
dcc              1407 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              1412 fs/f2fs/segment.c 	unsigned int pos = dcc->next_pos;
dcc              1416 fs/f2fs/segment.c 	mutex_lock(&dcc->cmd_lock);
dcc              1417 fs/f2fs/segment.c 	dc = (struct discard_cmd *)f2fs_lookup_rb_tree_ret(&dcc->root,
dcc              1439 fs/f2fs/segment.c 		dcc->next_pos = dc->lstart + dc->len;
dcc              1454 fs/f2fs/segment.c 		dcc->next_pos = 0;
dcc              1456 fs/f2fs/segment.c 	mutex_unlock(&dcc->cmd_lock);
dcc              1467 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              1488 fs/f2fs/segment.c 		pend_list = &dcc->pend_list[i];
dcc              1490 fs/f2fs/segment.c 		mutex_lock(&dcc->cmd_lock);
dcc              1493 fs/f2fs/segment.c 		if (unlikely(dcc->rbtree_check))
dcc              1495 fs/f2fs/segment.c 								&dcc->root));
dcc              1517 fs/f2fs/segment.c 		mutex_unlock(&dcc->cmd_lock);
dcc              1531 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              1537 fs/f2fs/segment.c 	mutex_lock(&dcc->cmd_lock);
dcc              1539 fs/f2fs/segment.c 		pend_list = &dcc->pend_list[i];
dcc              1546 fs/f2fs/segment.c 	mutex_unlock(&dcc->cmd_lock);
dcc              1559 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              1563 fs/f2fs/segment.c 	mutex_lock(&dcc->cmd_lock);
dcc              1571 fs/f2fs/segment.c 	mutex_unlock(&dcc->cmd_lock);
dcc              1580 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              1582 fs/f2fs/segment.c 					&(dcc->fstrim_list) : &(dcc->wait_list);
dcc              1590 fs/f2fs/segment.c 	mutex_lock(&dcc->cmd_lock);
dcc              1607 fs/f2fs/segment.c 	mutex_unlock(&dcc->cmd_lock);
dcc              1638 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              1642 fs/f2fs/segment.c 	mutex_lock(&dcc->cmd_lock);
dcc              1643 fs/f2fs/segment.c 	dc = (struct discard_cmd *)f2fs_lookup_rb_tree(&dcc->root,
dcc              1653 fs/f2fs/segment.c 	mutex_unlock(&dcc->cmd_lock);
dcc              1661 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              1663 fs/f2fs/segment.c 	if (dcc && dcc->f2fs_issue_discard) {
dcc              1664 fs/f2fs/segment.c 		struct task_struct *discard_thread = dcc->f2fs_issue_discard;
dcc              1666 fs/f2fs/segment.c 		dcc->f2fs_issue_discard = NULL;
dcc              1674 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              1679 fs/f2fs/segment.c 					dcc->discard_granularity);
dcc              1687 fs/f2fs/segment.c 	f2fs_bug_on(sbi, atomic_read(&dcc->discard_cmd_cnt));
dcc              1694 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              1695 fs/f2fs/segment.c 	wait_queue_head_t *q = &dcc->discard_wait_queue;
dcc              1704 fs/f2fs/segment.c 					dcc->discard_granularity);
dcc              1708 fs/f2fs/segment.c 				dcc->discard_wake,
dcc              1711 fs/f2fs/segment.c 		if (dcc->discard_wake)
dcc              1712 fs/f2fs/segment.c 			dcc->discard_wake = 0;
dcc              1715 fs/f2fs/segment.c 		if (atomic_read(&dcc->queued_discard))
dcc              1934 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              1935 fs/f2fs/segment.c 	struct list_head *head = &dcc->entry_list;
dcc              2025 fs/f2fs/segment.c 		dcc->nr_discards -= total_len;
dcc              2034 fs/f2fs/segment.c 	struct discard_cmd_control *dcc;
dcc              2038 fs/f2fs/segment.c 		dcc = SM_I(sbi)->dcc_info;
dcc              2042 fs/f2fs/segment.c 	dcc = f2fs_kzalloc(sbi, sizeof(struct discard_cmd_control), GFP_KERNEL);
dcc              2043 fs/f2fs/segment.c 	if (!dcc)
dcc              2046 fs/f2fs/segment.c 	dcc->discard_granularity = DEFAULT_DISCARD_GRANULARITY;
dcc              2047 fs/f2fs/segment.c 	INIT_LIST_HEAD(&dcc->entry_list);
dcc              2049 fs/f2fs/segment.c 		INIT_LIST_HEAD(&dcc->pend_list[i]);
dcc              2050 fs/f2fs/segment.c 	INIT_LIST_HEAD(&dcc->wait_list);
dcc              2051 fs/f2fs/segment.c 	INIT_LIST_HEAD(&dcc->fstrim_list);
dcc              2052 fs/f2fs/segment.c 	mutex_init(&dcc->cmd_lock);
dcc              2053 fs/f2fs/segment.c 	atomic_set(&dcc->issued_discard, 0);
dcc              2054 fs/f2fs/segment.c 	atomic_set(&dcc->queued_discard, 0);
dcc              2055 fs/f2fs/segment.c 	atomic_set(&dcc->discard_cmd_cnt, 0);
dcc              2056 fs/f2fs/segment.c 	dcc->nr_discards = 0;
dcc              2057 fs/f2fs/segment.c 	dcc->max_discards = MAIN_SEGS(sbi) << sbi->log_blocks_per_seg;
dcc              2058 fs/f2fs/segment.c 	dcc->undiscard_blks = 0;
dcc              2059 fs/f2fs/segment.c 	dcc->next_pos = 0;
dcc              2060 fs/f2fs/segment.c 	dcc->root = RB_ROOT_CACHED;
dcc              2061 fs/f2fs/segment.c 	dcc->rbtree_check = false;
dcc              2063 fs/f2fs/segment.c 	init_waitqueue_head(&dcc->discard_wait_queue);
dcc              2064 fs/f2fs/segment.c 	SM_I(sbi)->dcc_info = dcc;
dcc              2066 fs/f2fs/segment.c 	dcc->f2fs_issue_discard = kthread_run(issue_discard_thread, sbi,
dcc              2068 fs/f2fs/segment.c 	if (IS_ERR(dcc->f2fs_issue_discard)) {
dcc              2069 fs/f2fs/segment.c 		err = PTR_ERR(dcc->f2fs_issue_discard);
dcc              2070 fs/f2fs/segment.c 		kvfree(dcc);
dcc              2080 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              2082 fs/f2fs/segment.c 	if (!dcc)
dcc              2091 fs/f2fs/segment.c 	if (unlikely(atomic_read(&dcc->discard_cmd_cnt)))
dcc              2094 fs/f2fs/segment.c 	kvfree(dcc);
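
The allocation and teardown entries above (segment.c around lines 2034-2094) show the lifetime of the discard_cmd_control: allocate it zeroed, initialize its lists, lock, counters, and wait queue, publish it in SM_I(sbi)->dcc_info, then start the issue_discard_thread kthread, freeing the block again if thread creation fails; teardown stops the thread before freeing. A compact userspace model of that ordering, with pthreads and plain fields standing in for the kernel primitives:

#include <pthread.h>
#include <stdlib.h>

/* Simplified stand-in for struct discard_cmd_control. */
struct model_dcc {
	pthread_mutex_t cmd_lock;
	pthread_t       worker;		/* ~ f2fs_issue_discard kthread */
	int             have_worker;
	long            nr_discards;
	unsigned long long undiscard_blks;
};

static void *model_discard_worker(void *arg)
{
	(void)arg;	/* the real thread issues queued discards in a loop */
	return NULL;
}

/* Allocate, initialize, then start the worker; undo the allocation if
 * thread creation fails, mirroring the error path in the listing. */
static struct model_dcc *model_create_dcc(void)
{
	struct model_dcc *dcc = calloc(1, sizeof(*dcc));

	if (!dcc)
		return NULL;

	pthread_mutex_init(&dcc->cmd_lock, NULL);
	if (pthread_create(&dcc->worker, NULL, model_discard_worker, dcc)) {
		pthread_mutex_destroy(&dcc->cmd_lock);
		free(dcc);
		return NULL;
	}
	dcc->have_worker = 1;
	return dcc;
}

/* Teardown: stop (here: join) the worker first, then free the block. */
static void model_destroy_dcc(struct model_dcc *dcc)
{
	if (!dcc)
		return;
	if (dcc->have_worker)
		pthread_join(dcc->worker, NULL);
	pthread_mutex_destroy(&dcc->cmd_lock);
	free(dcc);
}
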
dcc              2747 fs/f2fs/segment.c 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc              2758 fs/f2fs/segment.c 	mutex_lock(&dcc->cmd_lock);
dcc              2759 fs/f2fs/segment.c 	if (unlikely(dcc->rbtree_check))
dcc              2761 fs/f2fs/segment.c 								&dcc->root));
dcc              2763 fs/f2fs/segment.c 	dc = (struct discard_cmd *)f2fs_lookup_rb_tree_ret(&dcc->root,
dcc              2781 fs/f2fs/segment.c 			list_move_tail(&dc->list, &dcc->fstrim_list);
dcc              2794 fs/f2fs/segment.c 			mutex_unlock(&dcc->cmd_lock);
dcc              2810 fs/f2fs/segment.c 	mutex_unlock(&dcc->cmd_lock);
dcc               853 fs/f2fs/segment.h 	struct discard_cmd_control *dcc = SM_I(sbi)->dcc_info;
dcc               860 fs/f2fs/segment.h 	mutex_lock(&dcc->cmd_lock);
dcc               862 fs/f2fs/segment.h 		if (i + 1 < dcc->discard_granularity)
dcc               864 fs/f2fs/segment.h 		if (!list_empty(&dcc->pend_list[i])) {
dcc               869 fs/f2fs/segment.h 	mutex_unlock(&dcc->cmd_lock);
dcc               873 fs/f2fs/segment.h 	dcc->discard_wake = 1;
dcc               874 fs/f2fs/segment.h 	wake_up_interruptible_all(&dcc->discard_wait_queue);
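
The segment.h entries above show the wake-up side of the discard machinery: take cmd_lock and scan the pend_list array for work at or above the configured granularity; if any was found, set discard_wake and wake everyone sleeping on discard_wait_queue. issue_discard_thread (segment.c lines 1694-1715) sleeps on that queue and clears the flag when it runs. A minimal userspace sketch of the flag-plus-wait-queue handshake, using a pthread condition variable in place of the kernel wait queue:

#include <pthread.h>
#include <stdbool.h>

struct model_waiter {
	pthread_mutex_t lock;		/* ~ cmd_lock             */
	pthread_cond_t  wait_queue;	/* ~ discard_wait_queue   */
	bool            discard_wake;
};

static void model_waiter_init(struct model_waiter *w)
{
	pthread_mutex_init(&w->lock, NULL);
	pthread_cond_init(&w->wait_queue, NULL);
	w->discard_wake = false;
}

/* Producer side: note that pending work exists and wake the thread. */
static void model_wake_discard_thread(struct model_waiter *w)
{
	pthread_mutex_lock(&w->lock);
	w->discard_wake = true;
	pthread_cond_broadcast(&w->wait_queue);
	pthread_mutex_unlock(&w->lock);
}

/* Consumer side: sleep until woken, then clear the flag before issuing
 * the queued discards (the real loop also wakes on a timeout). */
static void model_wait_for_work(struct model_waiter *w)
{
	pthread_mutex_lock(&w->lock);
	while (!w->discard_wake)
		pthread_cond_wait(&w->wait_queue, &w->lock);
	w->discard_wake = false;
	pthread_mutex_unlock(&w->lock);
}
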
dcc                27 include/linux/vexpress.h 		u32 *position, u32 *dcc);