Searched refs:rdev (Results 1 - 200 of 474) sorted by relevance

/linux-4.4.14/net/ieee802154/
rdev-ops.h
10 rdev_add_virtual_intf_deprecated(struct cfg802154_registered_device *rdev,
15 return rdev->ops->add_virtual_intf_deprecated(&rdev->wpan_phy, name,
20 rdev_del_virtual_intf_deprecated(struct cfg802154_registered_device *rdev,
23 rdev->ops->del_virtual_intf_deprecated(&rdev->wpan_phy, dev);
27 rdev_suspend(struct cfg802154_registered_device *rdev)
30 trace_802154_rdev_suspend(&rdev->wpan_phy);
31 ret = rdev->ops->suspend(&rdev->wpan_phy);
32 trace_802154_rdev_return_int(&rdev->wpan_phy, ret);
37 rdev_resume(struct cfg802154_registered_device *rdev)
40 trace_802154_rdev_resume(&rdev->wpan_phy);
41 ret = rdev->ops->resume(&rdev->wpan_phy);
42 trace_802154_rdev_return_int(&rdev->wpan_phy, ret);
47 rdev_add_virtual_intf(struct cfg802154_registered_device *rdev, char *name,
53 trace_802154_rdev_add_virtual_intf(&rdev->wpan_phy, name, type,
55 ret = rdev->ops->add_virtual_intf(&rdev->wpan_phy, name,
58 trace_802154_rdev_return_int(&rdev->wpan_phy, ret);
63 rdev_del_virtual_intf(struct cfg802154_registered_device *rdev,
68 trace_802154_rdev_del_virtual_intf(&rdev->wpan_phy, wpan_dev);
69 ret = rdev->ops->del_virtual_intf(&rdev->wpan_phy, wpan_dev);
70 trace_802154_rdev_return_int(&rdev->wpan_phy, ret);
75 rdev_set_channel(struct cfg802154_registered_device *rdev, u8 page, u8 channel)
79 trace_802154_rdev_set_channel(&rdev->wpan_phy, page, channel);
80 ret = rdev->ops->set_channel(&rdev->wpan_phy, page, channel);
81 trace_802154_rdev_return_int(&rdev->wpan_phy, ret);
86 rdev_set_cca_mode(struct cfg802154_registered_device *rdev,
91 trace_802154_rdev_set_cca_mode(&rdev->wpan_phy, cca);
92 ret = rdev->ops->set_cca_mode(&rdev->wpan_phy, cca);
93 trace_802154_rdev_return_int(&rdev->wpan_phy, ret);
98 rdev_set_cca_ed_level(struct cfg802154_registered_device *rdev, s32 ed_level)
102 trace_802154_rdev_set_cca_ed_level(&rdev->wpan_phy, ed_level);
103 ret = rdev->ops->set_cca_ed_level(&rdev->wpan_phy, ed_level);
104 trace_802154_rdev_return_int(&rdev->wpan_phy, ret);
109 rdev_set_tx_power(struct cfg802154_registered_device *rdev,
114 trace_802154_rdev_set_tx_power(&rdev->wpan_phy, power);
115 ret = rdev->ops->set_tx_power(&rdev->wpan_phy, power);
116 trace_802154_rdev_return_int(&rdev->wpan_phy, ret);
121 rdev_set_pan_id(struct cfg802154_registered_device *rdev,
126 trace_802154_rdev_set_pan_id(&rdev->wpan_phy, wpan_dev, pan_id);
127 ret = rdev->ops->set_pan_id(&rdev->wpan_phy, wpan_dev, pan_id);
128 trace_802154_rdev_return_int(&rdev->wpan_phy, ret);
133 rdev_set_short_addr(struct cfg802154_registered_device *rdev,
138 trace_802154_rdev_set_short_addr(&rdev->wpan_phy, wpan_dev, short_addr);
139 ret = rdev->ops->set_short_addr(&rdev->wpan_phy, wpan_dev, short_addr);
140 trace_802154_rdev_return_int(&rdev->wpan_phy, ret);
145 rdev_set_backoff_exponent(struct cfg802154_registered_device *rdev,
150 trace_802154_rdev_set_backoff_exponent(&rdev->wpan_phy, wpan_dev,
152 ret = rdev->ops->set_backoff_exponent(&rdev->wpan_phy, wpan_dev,
154 trace_802154_rdev_return_int(&rdev->wpan_phy, ret);
159 rdev_set_max_csma_backoffs(struct cfg802154_registered_device *rdev,
164 trace_802154_rdev_set_csma_backoffs(&rdev->wpan_phy, wpan_dev,
166 ret = rdev->ops->set_max_csma_backoffs(&rdev->wpan_phy, wpan_dev,
168 trace_802154_rdev_return_int(&rdev->wpan_phy, ret);
173 rdev_set_max_frame_retries(struct cfg802154_registered_device *rdev,
178 trace_802154_rdev_set_max_frame_retries(&rdev->wpan_phy, wpan_dev,
180 ret = rdev->ops->set_max_frame_retries(&rdev->wpan_phy, wpan_dev,
182 trace_802154_rdev_return_int(&rdev->wpan_phy, ret);
187 rdev_set_lbt_mode(struct cfg802154_registered_device *rdev,
192 trace_802154_rdev_set_lbt_mode(&rdev->wpan_phy, wpan_dev, mode);
193 ret = rdev->ops->set_lbt_mode(&rdev->wpan_phy, wpan_dev, mode);
194 trace_802154_rdev_return_int(&rdev->wpan_phy, ret);
199 rdev_set_ackreq_default(struct cfg802154_registered_device *rdev,
204 trace_802154_rdev_set_ackreq_default(&rdev->wpan_phy, wpan_dev,
206 ret = rdev->ops->set_ackreq_default(&rdev->wpan_phy, wpan_dev, ackreq);
207 trace_802154_rdev_return_int(&rdev->wpan_phy, ret);
214 rdev_get_llsec_table(struct cfg802154_registered_device *rdev,
218 rdev->ops->get_llsec_table(&rdev->wpan_phy, wpan_dev, table);
222 rdev_lock_llsec_table(struct cfg802154_registered_device *rdev,
225 rdev->ops->lock_llsec_table(&rdev->wpan_phy, wpan_dev);
229 rdev_unlock_llsec_table(struct cfg802154_registered_device *rdev,
232 rdev->ops->unlock_llsec_table(&rdev->wpan_phy, wpan_dev);
236 rdev_get_llsec_params(struct cfg802154_registered_device *rdev,
240 return rdev->ops->get_llsec_params(&rdev->wpan_phy, wpan_dev, params);
244 rdev_set_llsec_params(struct cfg802154_registered_device *rdev,
249 return rdev->ops->set_llsec_params(&rdev->wpan_phy, wpan_dev, params,
254 rdev_add_llsec_key(struct cfg802154_registered_device *rdev,
259 return rdev->ops->add_llsec_key(&rdev->wpan_phy, wpan_dev, id, key);
263 rdev_del_llsec_key(struct cfg802154_registered_device *rdev,
267 return rdev->ops->del_llsec_key(&rdev->wpan_phy, wpan_dev, id);
271 rdev_add_seclevel(struct cfg802154_registered_device *rdev,
275 return rdev->ops->add_seclevel(&rdev->wpan_phy, wpan_dev, sl);
279 rdev_del_seclevel(struct cfg802154_registered_device *rdev,
283 return rdev->ops->del_seclevel(&rdev->wpan_phy, wpan_dev, sl);
287 rdev_add_device(struct cfg802154_registered_device *rdev,
291 return rdev->ops->add_device(&rdev->wpan_phy, wpan_dev, dev_desc);
295 rdev_del_device(struct cfg802154_registered_device *rdev,
298 return rdev->ops->del_device(&rdev->wpan_phy, wpan_dev, extended_addr);
302 rdev_add_devkey(struct cfg802154_registered_device *rdev,
306 return rdev->ops->add_devkey(&rdev->wpan_phy, wpan_dev, extended_addr,
311 rdev_del_devkey(struct cfg802154_registered_device *rdev,
315 return rdev->ops->del_devkey(&rdev->wpan_phy, wpan_dev, extended_addr,
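
Taken together, the rdev-ops.h hits show that every wrapper is the same thin trampoline: emit an entry tracepoint, dispatch through the driver-supplied cfg802154_ops vtable, trace the return value. Reassembling the rdev_suspend() lines above gives the full shape (the trailing return is inferred, since that line falls outside the matches):

static inline int
rdev_suspend(struct cfg802154_registered_device *rdev)
{
	int ret;

	trace_802154_rdev_suspend(&rdev->wpan_phy);          /* entry tracepoint */
	ret = rdev->ops->suspend(&rdev->wpan_phy);           /* driver callback */
	trace_802154_rdev_return_int(&rdev->wpan_phy, ret);  /* result tracepoint */
	return ret;                                          /* inferred; not in the matches */
}

The llsec helpers at the bottom of the file (rdev_get_llsec_params() and friends) skip the tracepoints and forward directly, which is why their matches are single return statements.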

core.c
84 struct cfg802154_registered_device *result = NULL, *rdev;
88 list_for_each_entry(rdev, &cfg802154_rdev_list, list) {
89 if (rdev->wpan_phy_idx == wpan_phy_idx) {
90 result = rdev;
100 struct cfg802154_registered_device *rdev;
104 rdev = cfg802154_rdev_by_wpan_phy_idx(wpan_phy_idx);
105 if (!rdev)
107 return &rdev->wpan_phy;
114 struct cfg802154_registered_device *rdev;
117 alloc_size = sizeof(*rdev) + priv_size;
118 rdev = kzalloc(alloc_size, GFP_KERNEL);
119 if (!rdev)
122 rdev->ops = ops;
124 rdev->wpan_phy_idx = atomic_inc_return(&wpan_phy_counter);
126 if (unlikely(rdev->wpan_phy_idx < 0)) {
129 kfree(rdev);
134 rdev->wpan_phy_idx--;
136 INIT_LIST_HEAD(&rdev->wpan_dev_list);
137 device_initialize(&rdev->wpan_phy.dev);
138 dev_set_name(&rdev->wpan_phy.dev, PHY_NAME "%d", rdev->wpan_phy_idx);
140 rdev->wpan_phy.dev.class = &wpan_phy_class;
141 rdev->wpan_phy.dev.platform_data = rdev;
143 init_waitqueue_head(&rdev->dev_wait);
145 return &rdev->wpan_phy;
151 struct cfg802154_registered_device *rdev = wpan_phy_to_rdev(phy);
161 list_add_rcu(&rdev->list, &cfg802154_rdev_list);
175 struct cfg802154_registered_device *rdev = wpan_phy_to_rdev(phy);
177 wait_event(rdev->dev_wait, ({
180 __count = rdev->opencount;
188 WARN_ON(!list_empty(&rdev->wpan_dev_list));
193 list_del_rcu(&rdev->list);
210 void cfg802154_dev_free(struct cfg802154_registered_device *rdev)
212 kfree(rdev);
216 cfg802154_update_iface_num(struct cfg802154_registered_device *rdev,
221 rdev->num_running_ifaces += num;
229 struct cfg802154_registered_device *rdev;
234 rdev = wpan_phy_to_rdev(wpan_dev->wpan_phy);
242 wpan_dev->identifier = ++rdev->wpan_dev_id;
243 list_add_rcu(&wpan_dev->list, &rdev->wpan_dev_list);
244 rdev->devlist_generation++;
249 cfg802154_update_iface_num(rdev, wpan_dev->iftype, -1);
251 rdev->opencount--;
252 wake_up(&rdev->dev_wait);
255 cfg802154_update_iface_num(rdev, wpan_dev->iftype, 1);
257 rdev->opencount++;
268 rdev->devlist_generation++;
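
The wpan_phy_new() hits reassemble into the usual single-allocation idiom: the registered device and the driver's private area share one kzalloc(), the phy index comes from a global atomic counter, and the embedded struct device is initialized by hand. A condensed sketch; the static counter declaration, the atomic_dec() on the wrap path, and the early returns are filled in as assumptions since those lines fall outside the matches:

struct wpan_phy *wpan_phy_new(const struct cfg802154_ops *ops, size_t priv_size)
{
	static atomic_t wpan_phy_counter = ATOMIC_INIT(0);  /* assumed declaration */
	struct cfg802154_registered_device *rdev;
	size_t alloc_size;

	alloc_size = sizeof(*rdev) + priv_size;  /* rdev and driver priv in one block */
	rdev = kzalloc(alloc_size, GFP_KERNEL);
	if (!rdev)
		return NULL;

	rdev->ops = ops;
	rdev->wpan_phy_idx = atomic_inc_return(&wpan_phy_counter);
	if (unlikely(rdev->wpan_phy_idx < 0)) {
		atomic_dec(&wpan_phy_counter);  /* counter wrapped; assumed, not in the matches */
		kfree(rdev);
		return NULL;
	}
	rdev->wpan_phy_idx--;  /* atomic_inc_return() starts at 1, indices at 0 */

	INIT_LIST_HEAD(&rdev->wpan_dev_list);
	device_initialize(&rdev->wpan_phy.dev);
	dev_set_name(&rdev->wpan_phy.dev, PHY_NAME "%d", rdev->wpan_phy_idx);
	rdev->wpan_phy.dev.class = &wpan_phy_class;
	rdev->wpan_phy.dev.platform_data = rdev;
	init_waitqueue_head(&rdev->dev_wait);

	return &rdev->wpan_phy;
}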

sysfs.c
23 #include "rdev-ops.h"
55 struct cfg802154_registered_device *rdev = dev_to_rdev(dev);
57 cfg802154_dev_free(rdev);
70 struct cfg802154_registered_device *rdev = dev_to_rdev(dev);
73 if (rdev->ops->suspend) {
75 ret = rdev_suspend(rdev);
84 struct cfg802154_registered_device *rdev = dev_to_rdev(dev);
87 if (rdev->ops->resume) {
89 ret = rdev_resume(rdev);
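
sysfs.c is also where the optional-callback guard lives: rdev_suspend()/rdev_resume() are only invoked if the driver filled in the op. A sketch of wpan_phy_suspend() built from the hits; the rtnl_lock()/rtnl_unlock() pair is an assumption, since the locking lines are not among the matches:

static int wpan_phy_suspend(struct device *dev)
{
	struct cfg802154_registered_device *rdev = dev_to_rdev(dev);
	int ret = 0;

	if (rdev->ops->suspend) {  /* suspend is optional for drivers */
		rtnl_lock();       /* assumed locking, not in the matches */
		ret = rdev_suspend(rdev);
		rtnl_unlock();
	}
	return ret;
}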

nl802154.c
26 #include "rdev-ops.h"
60 struct cfg802154_registered_device *rdev;
80 list_for_each_entry(rdev, &cfg802154_rdev_list, list) {
85 if (have_wpan_dev_id && rdev->wpan_phy_idx != wpan_phy_idx)
88 list_for_each_entry(wpan_dev, &rdev->wpan_dev_list, list) {
114 struct cfg802154_registered_device *rdev = NULL, *tmp;
125 rdev = cfg802154_rdev_by_wpan_phy_idx(
146 if (rdev && tmp != rdev)
148 rdev = tmp;
168 if (rdev && tmp != rdev)
171 rdev = tmp;
175 if (!rdev)
180 return rdev;
253 struct cfg802154_registered_device **rdev,
273 *rdev = wpan_phy_to_rdev((*wpan_dev)->wpan_phy);
275 cb->args[0] = (*rdev)->wpan_phy_idx + 1;
286 *rdev = wpan_phy_to_rdev(wpan_phy);
289 list_for_each_entry(tmp, &(*rdev)->wpan_dev_list, list) {
309 nl802154_finish_wpan_dev_dump(struct cfg802154_registered_device *rdev)
346 nl802154_send_wpan_phy_channels(struct cfg802154_registered_device *rdev,
358 rdev->wpan_phy.supported.channels[page]))
368 struct cfg802154_registered_device *rdev)
370 const struct wpan_phy_supported *caps = &rdev->wpan_phy.supported;
391 if (rdev->wpan_phy.flags & WPAN_PHY_FLAG_CCA_ED_LEVEL) {
407 if (rdev->wpan_phy.flags & WPAN_PHY_FLAG_TXPOWER) {
422 if (rdev->wpan_phy.flags & WPAN_PHY_FLAG_CCA_MODE) {
452 static int nl802154_send_wpan_phy(struct cfg802154_registered_device *rdev,
465 if (nla_put_u32(msg, NL802154_ATTR_WPAN_PHY, rdev->wpan_phy_idx) ||
467 wpan_phy_name(&rdev->wpan_phy)) ||
479 rdev->wpan_phy.current_page) ||
481 rdev->wpan_phy.current_channel))
487 if (nl802154_send_wpan_phy_channels(rdev, msg))
491 if (rdev->wpan_phy.flags & WPAN_PHY_FLAG_CCA_MODE) {
493 rdev->wpan_phy.cca.mode))
496 if (rdev->wpan_phy.cca.mode == NL802154_CCA_ENERGY_CARRIER) {
498 rdev->wpan_phy.cca.opt))
503 if (rdev->wpan_phy.flags & WPAN_PHY_FLAG_TXPOWER) {
505 rdev->wpan_phy.transmit_power))
509 if (rdev->wpan_phy.flags & WPAN_PHY_FLAG_CCA_ED_LEVEL) {
511 rdev->wpan_phy.cca_ed_level))
515 if (nl802154_put_capabilities(msg, rdev))
525 if (rdev->ops->op) { \
543 if (rdev->wpan_phy.flags & WPAN_PHY_FLAG_TXPOWER)
546 if (rdev->wpan_phy.flags & WPAN_PHY_FLAG_CCA_ED_LEVEL)
549 if (rdev->wpan_phy.flags & WPAN_PHY_FLAG_CCA_MODE)
590 struct cfg802154_registered_device *rdev;
598 rdev = wpan_phy_to_rdev(
600 state->filter_wpan_phy = rdev->wpan_phy_idx;
612 struct cfg802154_registered_device *rdev;
631 list_for_each_entry(rdev, &cfg802154_rdev_list, list) {
636 state->filter_wpan_phy != rdev->wpan_phy_idx)
639 ret = nl802154_send_wpan_phy(rdev,
672 struct cfg802154_registered_device *rdev = info->user_ptr[0];
678 if (nl802154_send_wpan_phy(rdev, NL802154_CMD_NEW_WPAN_PHY, msg,
763 struct cfg802154_registered_device *rdev,
770 ret = rdev_get_llsec_params(rdev, wpan_dev, &params);
796 struct cfg802154_registered_device *rdev,
812 if (nla_put_u32(msg, NL802154_ATTR_WPAN_PHY, rdev->wpan_phy_idx) ||
816 rdev->devlist_generation ^
846 if (nl802154_get_llsec_params(msg, rdev, wpan_dev) < 0)
865 struct cfg802154_registered_device *rdev;
869 list_for_each_entry(rdev, &cfg802154_rdev_list, list) {
877 list_for_each_entry(wpan_dev, &rdev->wpan_dev_list, list) {
884 rdev, wpan_dev) < 0) {
904 struct cfg802154_registered_device *rdev = info->user_ptr[0];
912 rdev, wdev) < 0) {
922 struct cfg802154_registered_device *rdev = info->user_ptr[0];
936 !(rdev->wpan_phy.supported.iftypes & BIT(type)))
943 if (!rdev->ops->add_virtual_intf)
946 return rdev_add_virtual_intf(rdev,
953 struct cfg802154_registered_device *rdev = info->user_ptr[0];
956 if (!rdev->ops->del_virtual_intf)
968 return rdev_del_virtual_intf(rdev, wpan_dev);
973 struct cfg802154_registered_device *rdev = info->user_ptr[0];
985 !(rdev->wpan_phy.supported.channels[page] & BIT(channel)))
988 return rdev_set_channel(rdev, page, channel);
993 struct cfg802154_registered_device *rdev = info->user_ptr[0];
996 if (!(rdev->wpan_phy.flags & WPAN_PHY_FLAG_CCA_MODE))
1006 !(rdev->wpan_phy.supported.cca_modes & BIT(cca.mode)))
1015 !(rdev->wpan_phy.supported.cca_opts & BIT(cca.opt)))
1019 return rdev_set_cca_mode(rdev, &cca);
1024 struct cfg802154_registered_device *rdev = info->user_ptr[0];
1028 if (!(rdev->wpan_phy.flags & WPAN_PHY_FLAG_CCA_ED_LEVEL))
1036 for (i = 0; i < rdev->wpan_phy.supported.cca_ed_levels_size; i++) {
1037 if (ed_level == rdev->wpan_phy.supported.cca_ed_levels[i])
1038 return rdev_set_cca_ed_level(rdev, ed_level);
1046 struct cfg802154_registered_device *rdev = info->user_ptr[0];
1050 if (!(rdev->wpan_phy.flags & WPAN_PHY_FLAG_TXPOWER))
1058 for (i = 0; i < rdev->wpan_phy.supported.tx_powers_size; i++) {
1059 if (power == rdev->wpan_phy.supported.tx_powers[i])
1060 return rdev_set_tx_power(rdev, power);
1068 struct cfg802154_registered_device *rdev = info->user_ptr[0];
1094 return rdev_set_pan_id(rdev, wpan_dev, pan_id);
1099 struct cfg802154_registered_device *rdev = info->user_ptr[0];
1130 return rdev_set_short_addr(rdev, wpan_dev, short_addr);
1136 struct cfg802154_registered_device *rdev = info->user_ptr[0];
1153 if (min_be < rdev->wpan_phy.supported.min_minbe ||
1154 min_be > rdev->wpan_phy.supported.max_minbe ||
1155 max_be < rdev->wpan_phy.supported.min_maxbe ||
1156 max_be > rdev->wpan_phy.supported.max_maxbe ||
1160 return rdev_set_backoff_exponent(rdev, wpan_dev, min_be, max_be);
1166 struct cfg802154_registered_device *rdev = info->user_ptr[0];
1182 if (max_csma_backoffs < rdev->wpan_phy.supported.min_csma_backoffs ||
1183 max_csma_backoffs > rdev->wpan_phy.supported.max_csma_backoffs)
1186 return rdev_set_max_csma_backoffs(rdev, wpan_dev, max_csma_backoffs);
1192 struct cfg802154_registered_device *rdev = info->user_ptr[0];
1207 if (max_frame_retries < rdev->wpan_phy.supported.min_frame_retries ||
1208 max_frame_retries > rdev->wpan_phy.supported.max_frame_retries)
1211 return rdev_set_max_frame_retries(rdev, wpan_dev, max_frame_retries);
1216 struct cfg802154_registered_device *rdev = info->user_ptr[0];
1232 if (!wpan_phy_supported_bool(mode, rdev->wpan_phy.supported.lbt))
1235 return rdev_set_lbt_mode(rdev, wpan_dev, mode);
1241 struct cfg802154_registered_device *rdev = info->user_ptr[0];
1257 return rdev_set_ackreq_default(rdev, wpan_dev, ackreq);
1363 struct cfg802154_registered_device *rdev = info->user_ptr[0];
1403 return rdev_set_llsec_params(rdev, wpan_dev, &params, changed);
1408 struct cfg802154_registered_device *rdev,
1466 struct cfg802154_registered_device *rdev = NULL;
1472 err = nl802154_prepare_wpan_dev_dump(skb, cb, &rdev, &wpan_dev);
1481 rdev_lock_llsec_table(rdev, wpan_dev);
1482 rdev_get_llsec_table(rdev, wpan_dev, &table);
1492 rdev, wpan_dev->netdev, key) < 0) {
1495 rdev_unlock_llsec_table(rdev, wpan_dev);
1503 rdev_unlock_llsec_table(rdev, wpan_dev);
1506 nl802154_finish_wpan_dev_dump(rdev);
1522 struct cfg802154_registered_device *rdev = info->user_ptr[0];
1569 return rdev_add_llsec_key(rdev, wpan_dev, &id, &key);
1574 struct cfg802154_registered_device *rdev = info->user_ptr[0];
1588 return rdev_del_llsec_key(rdev, wpan_dev, &id);
1593 struct cfg802154_registered_device *rdev,
1636 struct cfg802154_registered_device *rdev = NULL;
1642 err = nl802154_prepare_wpan_dev_dump(skb, cb, &rdev, &wpan_dev);
1651 rdev_lock_llsec_table(rdev, wpan_dev);
1652 rdev_get_llsec_table(rdev, wpan_dev, &table);
1662 rdev, wpan_dev->netdev, dev) < 0) {
1665 rdev_unlock_llsec_table(rdev, wpan_dev);
1673 rdev_unlock_llsec_table(rdev, wpan_dev);
1676 nl802154_finish_wpan_dev_dump(rdev);
1728 struct cfg802154_registered_device *rdev = info->user_ptr[0];
1737 return rdev_add_device(rdev, wpan_dev, &dev_desc);
1742 struct cfg802154_registered_device *rdev = info->user_ptr[0];
1757 return rdev_del_device(rdev, wpan_dev, extended_addr);
1762 struct cfg802154_registered_device *rdev,
1807 struct cfg802154_registered_device *rdev = NULL;
1814 err = nl802154_prepare_wpan_dev_dump(skb, cb, &rdev, &wpan_dev);
1823 rdev_lock_llsec_table(rdev, wpan_dev);
1824 rdev_get_llsec_table(rdev, wpan_dev, &table);
1837 NLM_F_MULTI, rdev,
1843 rdev_unlock_llsec_table(rdev, wpan_dev);
1852 rdev_unlock_llsec_table(rdev, wpan_dev);
1855 nl802154_finish_wpan_dev_dump(rdev);
1868 struct cfg802154_registered_device *rdev = info->user_ptr[0];
1896 return rdev_add_devkey(rdev, wpan_dev, extended_addr, &key);
1901 struct cfg802154_registered_device *rdev = info->user_ptr[0];
1925 return rdev_del_devkey(rdev, wpan_dev, extended_addr, &key);
1930 struct cfg802154_registered_device *rdev,
1973 struct cfg802154_registered_device *rdev = NULL;
1979 err = nl802154_prepare_wpan_dev_dump(skb, cb, &rdev, &wpan_dev);
1988 rdev_lock_llsec_table(rdev, wpan_dev);
1989 rdev_get_llsec_table(rdev, wpan_dev, &table);
1999 rdev, wpan_dev->netdev, sl) < 0) {
2002 rdev_unlock_llsec_table(rdev, wpan_dev);
2010 rdev_unlock_llsec_table(rdev, wpan_dev);
2013 nl802154_finish_wpan_dev_dump(rdev);
2063 struct cfg802154_registered_device *rdev = info->user_ptr[0];
2072 return rdev_add_seclevel(rdev, wpan_dev, &sl);
2078 struct cfg802154_registered_device *rdev = info->user_ptr[0];
2088 return rdev_del_seclevel(rdev, wpan_dev, &sl);
2105 struct cfg802154_registered_device *rdev;
2114 rdev = cfg802154_get_dev_from_info(genl_info_net(info), info);
2115 if (IS_ERR(rdev)) {
2118 return PTR_ERR(rdev);
2120 info->user_ptr[0] = rdev;
2133 rdev = wpan_phy_to_rdev(wpan_dev->wpan_phy);
2158 info->user_ptr[0] = rdev;
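
All of the llsec dump handlers in nl802154.c follow one locking discipline: resolve the rdev/wpan_dev pair from the netlink callback, hold the driver's llsec table lock across the whole iteration, and drop it on both the error and success paths before finishing the dump. A condensed sketch of the shape, with the per-entry send loop elided:

static int nl802154_dump_llsec_key(struct sk_buff *skb, struct netlink_callback *cb)
{
	struct cfg802154_registered_device *rdev = NULL;
	struct ieee802154_llsec_table *table;
	struct wpan_dev *wpan_dev;
	int err;

	err = nl802154_prepare_wpan_dev_dump(skb, cb, &rdev, &wpan_dev);
	if (err)
		return err;

	rdev_lock_llsec_table(rdev, wpan_dev);
	rdev_get_llsec_table(rdev, wpan_dev, &table);
	/* iterate table->keys, emitting one netlink message per entry via
	 * nl802154_send_key(); if the skb fills up, unlock and stop so the
	 * next dump round can resume from the recorded index (elided) */
	rdev_unlock_llsec_table(rdev, wpan_dev);

	nl802154_finish_wpan_dev_dump(rdev);
	return skb->len;
}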
/linux-4.4.14/drivers/gpu/drm/radeon/
radeon_asic.h
34 uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
35 void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
36 uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
37 void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
39 uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
40 void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
41 uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
42 void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
43 void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
61 int r100_init(struct radeon_device *rdev);
62 void r100_fini(struct radeon_device *rdev);
63 int r100_suspend(struct radeon_device *rdev);
64 int r100_resume(struct radeon_device *rdev);
65 void r100_vga_set_state(struct radeon_device *rdev, bool state);
66 bool r100_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
67 int r100_asic_reset(struct radeon_device *rdev);
68 u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
69 void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
71 void r100_pci_gart_set_page(struct radeon_device *rdev, unsigned i,
73 void r100_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
74 int r100_irq_set(struct radeon_device *rdev);
75 int r100_irq_process(struct radeon_device *rdev);
76 void r100_fence_ring_emit(struct radeon_device *rdev,
78 bool r100_semaphore_ring_emit(struct radeon_device *rdev,
83 void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
84 uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
85 struct radeon_fence *r100_copy_blit(struct radeon_device *rdev,
90 int r100_set_surface_reg(struct radeon_device *rdev, int reg,
93 void r100_clear_surface_reg(struct radeon_device *rdev, int reg);
94 void r100_bandwidth_update(struct radeon_device *rdev);
95 void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
96 int r100_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
97 void r100_hpd_init(struct radeon_device *rdev);
98 void r100_hpd_fini(struct radeon_device *rdev);
99 bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
100 void r100_hpd_set_polarity(struct radeon_device *rdev,
102 int r100_debugfs_rbbm_init(struct radeon_device *rdev);
103 int r100_debugfs_cp_init(struct radeon_device *rdev);
104 void r100_cp_disable(struct radeon_device *rdev);
105 int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
106 void r100_cp_fini(struct radeon_device *rdev);
107 int r100_pci_gart_init(struct radeon_device *rdev);
108 void r100_pci_gart_fini(struct radeon_device *rdev);
109 int r100_pci_gart_enable(struct radeon_device *rdev);
110 void r100_pci_gart_disable(struct radeon_device *rdev);
111 int r100_debugfs_mc_info_init(struct radeon_device *rdev);
112 int r100_gui_wait_for_idle(struct radeon_device *rdev);
113 int r100_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
114 void r100_irq_disable(struct radeon_device *rdev);
115 void r100_mc_stop(struct radeon_device *rdev, struct r100_mc_save *save);
116 void r100_mc_resume(struct radeon_device *rdev, struct r100_mc_save *save);
117 void r100_vram_init_sizes(struct radeon_device *rdev);
118 int r100_cp_reset(struct radeon_device *rdev);
119 void r100_vga_render_disable(struct radeon_device *rdev);
120 void r100_restore_sanity(struct radeon_device *rdev);
131 void r100_enable_bm(struct radeon_device *rdev);
132 void r100_set_common_regs(struct radeon_device *rdev);
133 void r100_bm_disable(struct radeon_device *rdev);
134 extern bool r100_gui_idle(struct radeon_device *rdev);
135 extern void r100_pm_misc(struct radeon_device *rdev);
136 extern void r100_pm_prepare(struct radeon_device *rdev);
137 extern void r100_pm_finish(struct radeon_device *rdev);
138 extern void r100_pm_init_profile(struct radeon_device *rdev);
139 extern void r100_pm_get_dynpm_state(struct radeon_device *rdev);
140 extern void r100_page_flip(struct radeon_device *rdev, int crtc,
142 extern bool r100_page_flip_pending(struct radeon_device *rdev, int crtc);
143 extern void r100_wait_for_vblank(struct radeon_device *rdev, int crtc);
144 extern int r100_mc_wait_for_idle(struct radeon_device *rdev);
146 u32 r100_gfx_get_rptr(struct radeon_device *rdev,
148 u32 r100_gfx_get_wptr(struct radeon_device *rdev,
150 void r100_gfx_set_wptr(struct radeon_device *rdev,
156 struct radeon_fence *r200_copy_dma(struct radeon_device *rdev,
161 void r200_set_safe_registers(struct radeon_device *rdev);
166 extern int r300_init(struct radeon_device *rdev);
167 extern void r300_fini(struct radeon_device *rdev);
168 extern int r300_suspend(struct radeon_device *rdev);
169 extern int r300_resume(struct radeon_device *rdev);
170 extern int r300_asic_reset(struct radeon_device *rdev);
171 extern void r300_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
172 extern void r300_fence_ring_emit(struct radeon_device *rdev,
175 extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
177 extern void rv370_pcie_gart_set_page(struct radeon_device *rdev, unsigned i,
179 extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
180 extern int rv370_get_pcie_lanes(struct radeon_device *rdev);
181 extern void r300_set_reg_safe(struct radeon_device *rdev);
182 extern void r300_mc_program(struct radeon_device *rdev);
183 extern void r300_mc_init(struct radeon_device *rdev);
184 extern void r300_clock_startup(struct radeon_device *rdev);
185 extern int r300_mc_wait_for_idle(struct radeon_device *rdev);
186 extern int rv370_pcie_gart_init(struct radeon_device *rdev);
187 extern void rv370_pcie_gart_fini(struct radeon_device *rdev);
188 extern int rv370_pcie_gart_enable(struct radeon_device *rdev);
189 extern void rv370_pcie_gart_disable(struct radeon_device *rdev);
190 extern int r300_mc_wait_for_idle(struct radeon_device *rdev);
195 extern int r420_init(struct radeon_device *rdev);
196 extern void r420_fini(struct radeon_device *rdev);
197 extern int r420_suspend(struct radeon_device *rdev);
198 extern int r420_resume(struct radeon_device *rdev);
199 extern void r420_pm_init_profile(struct radeon_device *rdev);
200 extern u32 r420_mc_rreg(struct radeon_device *rdev, u32 reg);
201 extern void r420_mc_wreg(struct radeon_device *rdev, u32 reg, u32 v);
202 extern int r420_debugfs_pipes_info_init(struct radeon_device *rdev);
203 extern void r420_pipes_init(struct radeon_device *rdev);
208 extern int rs400_init(struct radeon_device *rdev);
209 extern void rs400_fini(struct radeon_device *rdev);
210 extern int rs400_suspend(struct radeon_device *rdev);
211 extern int rs400_resume(struct radeon_device *rdev);
212 void rs400_gart_tlb_flush(struct radeon_device *rdev);
214 void rs400_gart_set_page(struct radeon_device *rdev, unsigned i,
216 uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
217 void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
218 int rs400_gart_init(struct radeon_device *rdev);
219 int rs400_gart_enable(struct radeon_device *rdev);
220 void rs400_gart_adjust_size(struct radeon_device *rdev);
221 void rs400_gart_disable(struct radeon_device *rdev);
222 void rs400_gart_fini(struct radeon_device *rdev);
223 extern int rs400_mc_wait_for_idle(struct radeon_device *rdev);
228 extern int rs600_asic_reset(struct radeon_device *rdev);
229 extern int rs600_init(struct radeon_device *rdev);
230 extern void rs600_fini(struct radeon_device *rdev);
231 extern int rs600_suspend(struct radeon_device *rdev);
232 extern int rs600_resume(struct radeon_device *rdev);
233 int rs600_irq_set(struct radeon_device *rdev);
234 int rs600_irq_process(struct radeon_device *rdev);
235 void rs600_irq_disable(struct radeon_device *rdev);
236 u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
237 void rs600_gart_tlb_flush(struct radeon_device *rdev);
239 void rs600_gart_set_page(struct radeon_device *rdev, unsigned i,
241 uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
242 void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
243 void rs600_bandwidth_update(struct radeon_device *rdev);
244 void rs600_hpd_init(struct radeon_device *rdev);
245 void rs600_hpd_fini(struct radeon_device *rdev);
246 bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
247 void rs600_hpd_set_polarity(struct radeon_device *rdev,
249 extern void rs600_pm_misc(struct radeon_device *rdev);
250 extern void rs600_pm_prepare(struct radeon_device *rdev);
251 extern void rs600_pm_finish(struct radeon_device *rdev);
252 extern void rs600_page_flip(struct radeon_device *rdev, int crtc,
254 extern bool rs600_page_flip_pending(struct radeon_device *rdev, int crtc);
255 void rs600_set_safe_registers(struct radeon_device *rdev);
256 extern void avivo_wait_for_vblank(struct radeon_device *rdev, int crtc);
257 extern int rs600_mc_wait_for_idle(struct radeon_device *rdev);
262 int rs690_init(struct radeon_device *rdev);
263 void rs690_fini(struct radeon_device *rdev);
264 int rs690_resume(struct radeon_device *rdev);
265 int rs690_suspend(struct radeon_device *rdev);
266 uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
267 void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
268 void rs690_bandwidth_update(struct radeon_device *rdev);
269 void rs690_line_buffer_adjust(struct radeon_device *rdev,
272 extern int rs690_mc_wait_for_idle(struct radeon_device *rdev);
283 int rv515_init(struct radeon_device *rdev);
284 void rv515_fini(struct radeon_device *rdev);
285 uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
286 void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
287 void rv515_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
288 void rv515_bandwidth_update(struct radeon_device *rdev);
289 int rv515_resume(struct radeon_device *rdev);
290 int rv515_suspend(struct radeon_device *rdev);
291 void rv515_bandwidth_avivo_update(struct radeon_device *rdev);
292 void rv515_vga_render_disable(struct radeon_device *rdev);
293 void rv515_set_safe_registers(struct radeon_device *rdev);
294 void rv515_mc_stop(struct radeon_device *rdev, struct rv515_mc_save *save);
295 void rv515_mc_resume(struct radeon_device *rdev, struct rv515_mc_save *save);
296 void rv515_clock_startup(struct radeon_device *rdev);
297 void rv515_debugfs(struct radeon_device *rdev);
298 int rv515_mc_wait_for_idle(struct radeon_device *rdev);
303 int r520_init(struct radeon_device *rdev);
304 int r520_resume(struct radeon_device *rdev);
305 int r520_mc_wait_for_idle(struct radeon_device *rdev);
310 int r600_init(struct radeon_device *rdev);
311 void r600_fini(struct radeon_device *rdev);
312 int r600_suspend(struct radeon_device *rdev);
313 int r600_resume(struct radeon_device *rdev);
314 void r600_vga_set_state(struct radeon_device *rdev, bool state);
315 int r600_wb_init(struct radeon_device *rdev);
316 void r600_wb_fini(struct radeon_device *rdev);
317 void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
318 uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
319 void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
322 void r600_fence_ring_emit(struct radeon_device *rdev,
324 bool r600_semaphore_ring_emit(struct radeon_device *rdev,
328 void r600_dma_fence_ring_emit(struct radeon_device *rdev,
330 bool r600_dma_semaphore_ring_emit(struct radeon_device *rdev,
334 void r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
335 bool r600_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
336 bool r600_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
337 int r600_asic_reset(struct radeon_device *rdev);
338 int r600_set_surface_reg(struct radeon_device *rdev, int reg,
341 void r600_clear_surface_reg(struct radeon_device *rdev, int reg);
342 int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
343 int r600_dma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
344 void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
345 int r600_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
346 int r600_dma_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
347 struct radeon_fence *r600_copy_cpdma(struct radeon_device *rdev,
351 struct radeon_fence *r600_copy_dma(struct radeon_device *rdev,
355 void r600_hpd_init(struct radeon_device *rdev);
356 void r600_hpd_fini(struct radeon_device *rdev);
357 bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
358 void r600_hpd_set_polarity(struct radeon_device *rdev,
360 extern void r600_mmio_hdp_flush(struct radeon_device *rdev);
361 extern bool r600_gui_idle(struct radeon_device *rdev);
362 extern void r600_pm_misc(struct radeon_device *rdev);
363 extern void r600_pm_init_profile(struct radeon_device *rdev);
364 extern void rs780_pm_init_profile(struct radeon_device *rdev);
365 extern uint32_t rs780_mc_rreg(struct radeon_device *rdev, uint32_t reg);
366 extern void rs780_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
367 extern void r600_pm_get_dynpm_state(struct radeon_device *rdev);
368 extern void r600_set_pcie_lanes(struct radeon_device *rdev, int lanes);
369 extern int r600_get_pcie_lanes(struct radeon_device *rdev);
370 bool r600_card_posted(struct radeon_device *rdev);
371 void r600_cp_stop(struct radeon_device *rdev);
372 int r600_cp_start(struct radeon_device *rdev);
373 void r600_ring_init(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ring_size);
374 int r600_cp_resume(struct radeon_device *rdev);
375 void r600_cp_fini(struct radeon_device *rdev);
377 int r600_mc_wait_for_idle(struct radeon_device *rdev);
378 int r600_pcie_gart_init(struct radeon_device *rdev);
379 void r600_scratch_init(struct radeon_device *rdev);
380 int r600_init_microcode(struct radeon_device *rdev);
381 u32 r600_gfx_get_rptr(struct radeon_device *rdev,
383 u32 r600_gfx_get_wptr(struct radeon_device *rdev,
385 void r600_gfx_set_wptr(struct radeon_device *rdev,
387 int r600_get_allowed_info_register(struct radeon_device *rdev,
390 int r600_irq_process(struct radeon_device *rdev);
391 int r600_irq_init(struct radeon_device *rdev);
392 void r600_irq_fini(struct radeon_device *rdev);
393 void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size);
394 int r600_irq_set(struct radeon_device *rdev);
395 void r600_irq_suspend(struct radeon_device *rdev);
396 void r600_disable_interrupts(struct radeon_device *rdev);
397 void r600_rlc_stop(struct radeon_device *rdev);
399 void r600_audio_fini(struct radeon_device *rdev);
407 int r600_mc_wait_for_idle(struct radeon_device *rdev);
408 u32 r600_get_xclk(struct radeon_device *rdev);
409 uint64_t r600_get_gpu_clock_counter(struct radeon_device *rdev);
410 int rv6xx_get_temp(struct radeon_device *rdev);
411 int r600_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
412 int r600_dpm_pre_set_power_state(struct radeon_device *rdev);
413 void r600_dpm_post_set_power_state(struct radeon_device *rdev);
414 int r600_dpm_late_enable(struct radeon_device *rdev);
416 uint32_t r600_dma_get_rptr(struct radeon_device *rdev,
418 uint32_t r600_dma_get_wptr(struct radeon_device *rdev,
420 void r600_dma_set_wptr(struct radeon_device *rdev,
423 int rv6xx_dpm_init(struct radeon_device *rdev);
424 int rv6xx_dpm_enable(struct radeon_device *rdev);
425 void rv6xx_dpm_disable(struct radeon_device *rdev);
426 int rv6xx_dpm_set_power_state(struct radeon_device *rdev);
427 void rv6xx_setup_asic(struct radeon_device *rdev);
428 void rv6xx_dpm_display_configuration_changed(struct radeon_device *rdev);
429 void rv6xx_dpm_fini(struct radeon_device *rdev);
430 u32 rv6xx_dpm_get_sclk(struct radeon_device *rdev, bool low);
431 u32 rv6xx_dpm_get_mclk(struct radeon_device *rdev, bool low);
432 void rv6xx_dpm_print_power_state(struct radeon_device *rdev,
434 void rv6xx_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
436 int rv6xx_dpm_force_performance_level(struct radeon_device *rdev,
438 u32 rv6xx_dpm_get_current_sclk(struct radeon_device *rdev);
439 u32 rv6xx_dpm_get_current_mclk(struct radeon_device *rdev);
441 int rs780_dpm_init(struct radeon_device *rdev);
442 int rs780_dpm_enable(struct radeon_device *rdev);
443 void rs780_dpm_disable(struct radeon_device *rdev);
444 int rs780_dpm_set_power_state(struct radeon_device *rdev);
445 void rs780_dpm_setup_asic(struct radeon_device *rdev);
446 void rs780_dpm_display_configuration_changed(struct radeon_device *rdev);
447 void rs780_dpm_fini(struct radeon_device *rdev);
448 u32 rs780_dpm_get_sclk(struct radeon_device *rdev, bool low);
449 u32 rs780_dpm_get_mclk(struct radeon_device *rdev, bool low);
450 void rs780_dpm_print_power_state(struct radeon_device *rdev,
452 void rs780_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
454 int rs780_dpm_force_performance_level(struct radeon_device *rdev,
456 u32 rs780_dpm_get_current_sclk(struct radeon_device *rdev);
457 u32 rs780_dpm_get_current_mclk(struct radeon_device *rdev);
462 int rv770_init(struct radeon_device *rdev);
463 void rv770_fini(struct radeon_device *rdev);
464 int rv770_suspend(struct radeon_device *rdev);
465 int rv770_resume(struct radeon_device *rdev);
466 void rv770_pm_misc(struct radeon_device *rdev);
467 void rv770_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
468 bool rv770_page_flip_pending(struct radeon_device *rdev, int crtc);
469 void r700_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
470 void r700_cp_stop(struct radeon_device *rdev);
471 void r700_cp_fini(struct radeon_device *rdev);
472 struct radeon_fence *rv770_copy_dma(struct radeon_device *rdev,
476 u32 rv770_get_xclk(struct radeon_device *rdev);
477 int rv770_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
478 int rv770_get_temp(struct radeon_device *rdev);
480 int rv770_dpm_init(struct radeon_device *rdev);
481 int rv770_dpm_enable(struct radeon_device *rdev);
482 int rv770_dpm_late_enable(struct radeon_device *rdev);
483 void rv770_dpm_disable(struct radeon_device *rdev);
484 int rv770_dpm_set_power_state(struct radeon_device *rdev);
485 void rv770_dpm_setup_asic(struct radeon_device *rdev);
486 void rv770_dpm_display_configuration_changed(struct radeon_device *rdev);
487 void rv770_dpm_fini(struct radeon_device *rdev);
488 u32 rv770_dpm_get_sclk(struct radeon_device *rdev, bool low);
489 u32 rv770_dpm_get_mclk(struct radeon_device *rdev, bool low);
490 void rv770_dpm_print_power_state(struct radeon_device *rdev,
492 void rv770_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
494 int rv770_dpm_force_performance_level(struct radeon_device *rdev,
496 bool rv770_dpm_vblank_too_short(struct radeon_device *rdev);
497 u32 rv770_dpm_get_current_sclk(struct radeon_device *rdev);
498 u32 rv770_dpm_get_current_mclk(struct radeon_device *rdev);
509 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev);
510 int evergreen_init(struct radeon_device *rdev);
511 void evergreen_fini(struct radeon_device *rdev);
512 int evergreen_suspend(struct radeon_device *rdev);
513 int evergreen_resume(struct radeon_device *rdev);
514 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
515 bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
516 int evergreen_asic_reset(struct radeon_device *rdev);
517 void evergreen_bandwidth_update(struct radeon_device *rdev);
518 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
519 void evergreen_hpd_init(struct radeon_device *rdev);
520 void evergreen_hpd_fini(struct radeon_device *rdev);
521 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
522 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
524 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc);
525 int evergreen_irq_set(struct radeon_device *rdev);
526 int evergreen_irq_process(struct radeon_device *rdev);
529 extern void evergreen_pm_misc(struct radeon_device *rdev);
530 extern void evergreen_pm_prepare(struct radeon_device *rdev);
531 extern void evergreen_pm_finish(struct radeon_device *rdev);
532 extern void sumo_pm_init_profile(struct radeon_device *rdev);
533 extern void btc_pm_init_profile(struct radeon_device *rdev);
534 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
535 int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
536 extern void evergreen_page_flip(struct radeon_device *rdev, int crtc,
538 extern bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc);
539 extern void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc);
540 void evergreen_disable_interrupt_state(struct radeon_device *rdev);
541 int evergreen_mc_wait_for_idle(struct radeon_device *rdev);
542 void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
544 void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
546 struct radeon_fence *evergreen_copy_dma(struct radeon_device *rdev,
550 int evergreen_get_temp(struct radeon_device *rdev);
551 int evergreen_get_allowed_info_register(struct radeon_device *rdev,
553 int sumo_get_temp(struct radeon_device *rdev);
554 int tn_get_temp(struct radeon_device *rdev);
555 int cypress_dpm_init(struct radeon_device *rdev);
556 void cypress_dpm_setup_asic(struct radeon_device *rdev);
557 int cypress_dpm_enable(struct radeon_device *rdev);
558 void cypress_dpm_disable(struct radeon_device *rdev);
559 int cypress_dpm_set_power_state(struct radeon_device *rdev);
560 void cypress_dpm_display_configuration_changed(struct radeon_device *rdev);
561 void cypress_dpm_fini(struct radeon_device *rdev);
562 bool cypress_dpm_vblank_too_short(struct radeon_device *rdev);
563 int btc_dpm_init(struct radeon_device *rdev);
564 void btc_dpm_setup_asic(struct radeon_device *rdev);
565 int btc_dpm_enable(struct radeon_device *rdev);
566 void btc_dpm_disable(struct radeon_device *rdev);
567 int btc_dpm_pre_set_power_state(struct radeon_device *rdev);
568 int btc_dpm_set_power_state(struct radeon_device *rdev);
569 void btc_dpm_post_set_power_state(struct radeon_device *rdev);
570 void btc_dpm_fini(struct radeon_device *rdev);
571 u32 btc_dpm_get_sclk(struct radeon_device *rdev, bool low);
572 u32 btc_dpm_get_mclk(struct radeon_device *rdev, bool low);
573 bool btc_dpm_vblank_too_short(struct radeon_device *rdev);
574 void btc_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
576 u32 btc_dpm_get_current_sclk(struct radeon_device *rdev);
577 u32 btc_dpm_get_current_mclk(struct radeon_device *rdev);
578 int sumo_dpm_init(struct radeon_device *rdev);
579 int sumo_dpm_enable(struct radeon_device *rdev);
580 int sumo_dpm_late_enable(struct radeon_device *rdev);
581 void sumo_dpm_disable(struct radeon_device *rdev);
582 int sumo_dpm_pre_set_power_state(struct radeon_device *rdev);
583 int sumo_dpm_set_power_state(struct radeon_device *rdev);
584 void sumo_dpm_post_set_power_state(struct radeon_device *rdev);
585 void sumo_dpm_setup_asic(struct radeon_device *rdev);
586 void sumo_dpm_display_configuration_changed(struct radeon_device *rdev);
587 void sumo_dpm_fini(struct radeon_device *rdev);
588 u32 sumo_dpm_get_sclk(struct radeon_device *rdev, bool low);
589 u32 sumo_dpm_get_mclk(struct radeon_device *rdev, bool low);
590 void sumo_dpm_print_power_state(struct radeon_device *rdev,
592 void sumo_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
594 int sumo_dpm_force_performance_level(struct radeon_device *rdev,
596 u32 sumo_dpm_get_current_sclk(struct radeon_device *rdev);
597 u32 sumo_dpm_get_current_mclk(struct radeon_device *rdev);
602 void cayman_fence_ring_emit(struct radeon_device *rdev,
604 void cayman_pcie_gart_tlb_flush(struct radeon_device *rdev);
605 int cayman_init(struct radeon_device *rdev);
606 void cayman_fini(struct radeon_device *rdev);
607 int cayman_suspend(struct radeon_device *rdev);
608 int cayman_resume(struct radeon_device *rdev);
609 int cayman_asic_reset(struct radeon_device *rdev);
610 void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
611 int cayman_vm_init(struct radeon_device *rdev);
612 void cayman_vm_fini(struct radeon_device *rdev);
613 void cayman_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
615 uint32_t cayman_vm_page_flags(struct radeon_device *rdev, uint32_t flags);
616 int evergreen_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
617 int evergreen_dma_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
618 void cayman_dma_ring_ib_execute(struct radeon_device *rdev,
620 bool cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
621 bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
623 void cayman_dma_vm_copy_pages(struct radeon_device *rdev,
627 void cayman_dma_vm_write_pages(struct radeon_device *rdev,
632 void cayman_dma_vm_set_pages(struct radeon_device *rdev,
639 void cayman_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
642 u32 cayman_gfx_get_rptr(struct radeon_device *rdev,
644 u32 cayman_gfx_get_wptr(struct radeon_device *rdev,
646 void cayman_gfx_set_wptr(struct radeon_device *rdev,
648 uint32_t cayman_dma_get_rptr(struct radeon_device *rdev,
650 uint32_t cayman_dma_get_wptr(struct radeon_device *rdev,
652 void cayman_dma_set_wptr(struct radeon_device *rdev,
654 int cayman_get_allowed_info_register(struct radeon_device *rdev,
657 int ni_dpm_init(struct radeon_device *rdev);
658 void ni_dpm_setup_asic(struct radeon_device *rdev);
659 int ni_dpm_enable(struct radeon_device *rdev);
660 void ni_dpm_disable(struct radeon_device *rdev);
661 int ni_dpm_pre_set_power_state(struct radeon_device *rdev);
662 int ni_dpm_set_power_state(struct radeon_device *rdev);
663 void ni_dpm_post_set_power_state(struct radeon_device *rdev);
664 void ni_dpm_fini(struct radeon_device *rdev);
665 u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low);
666 u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low);
667 void ni_dpm_print_power_state(struct radeon_device *rdev,
669 void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
671 int ni_dpm_force_performance_level(struct radeon_device *rdev,
673 bool ni_dpm_vblank_too_short(struct radeon_device *rdev);
674 u32 ni_dpm_get_current_sclk(struct radeon_device *rdev);
675 u32 ni_dpm_get_current_mclk(struct radeon_device *rdev);
676 int trinity_dpm_init(struct radeon_device *rdev);
677 int trinity_dpm_enable(struct radeon_device *rdev);
678 int trinity_dpm_late_enable(struct radeon_device *rdev);
679 void trinity_dpm_disable(struct radeon_device *rdev);
680 int trinity_dpm_pre_set_power_state(struct radeon_device *rdev);
681 int trinity_dpm_set_power_state(struct radeon_device *rdev);
682 void trinity_dpm_post_set_power_state(struct radeon_device *rdev);
683 void trinity_dpm_setup_asic(struct radeon_device *rdev);
684 void trinity_dpm_display_configuration_changed(struct radeon_device *rdev);
685 void trinity_dpm_fini(struct radeon_device *rdev);
686 u32 trinity_dpm_get_sclk(struct radeon_device *rdev, bool low);
687 u32 trinity_dpm_get_mclk(struct radeon_device *rdev, bool low);
688 void trinity_dpm_print_power_state(struct radeon_device *rdev,
690 void trinity_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
692 int trinity_dpm_force_performance_level(struct radeon_device *rdev,
694 void trinity_dpm_enable_bapm(struct radeon_device *rdev, bool enable);
695 u32 trinity_dpm_get_current_sclk(struct radeon_device *rdev);
696 u32 trinity_dpm_get_current_mclk(struct radeon_device *rdev);
697 int tn_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk);
700 void dce6_bandwidth_update(struct radeon_device *rdev);
701 void dce6_audio_fini(struct radeon_device *rdev);
706 void si_fence_ring_emit(struct radeon_device *rdev,
708 void si_pcie_gart_tlb_flush(struct radeon_device *rdev);
709 int si_init(struct radeon_device *rdev);
710 void si_fini(struct radeon_device *rdev);
711 int si_suspend(struct radeon_device *rdev);
712 int si_resume(struct radeon_device *rdev);
713 bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
714 bool si_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
715 int si_asic_reset(struct radeon_device *rdev);
716 void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
717 int si_irq_set(struct radeon_device *rdev);
718 int si_irq_process(struct radeon_device *rdev);
719 int si_vm_init(struct radeon_device *rdev);
720 void si_vm_fini(struct radeon_device *rdev);
721 void si_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
723 int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
724 struct radeon_fence *si_copy_dma(struct radeon_device *rdev,
729 void si_dma_vm_copy_pages(struct radeon_device *rdev,
733 void si_dma_vm_write_pages(struct radeon_device *rdev,
738 void si_dma_vm_set_pages(struct radeon_device *rdev,
744 void si_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
746 u32 si_get_xclk(struct radeon_device *rdev);
747 uint64_t si_get_gpu_clock_counter(struct radeon_device *rdev);
748 int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
749 int si_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk);
750 int si_get_temp(struct radeon_device *rdev);
751 int si_get_allowed_info_register(struct radeon_device *rdev,
753 int si_dpm_init(struct radeon_device *rdev);
754 void si_dpm_setup_asic(struct radeon_device *rdev);
755 int si_dpm_enable(struct radeon_device *rdev);
756 int si_dpm_late_enable(struct radeon_device *rdev);
757 void si_dpm_disable(struct radeon_device *rdev);
758 int si_dpm_pre_set_power_state(struct radeon_device *rdev);
759 int si_dpm_set_power_state(struct radeon_device *rdev);
760 void si_dpm_post_set_power_state(struct radeon_device *rdev);
761 void si_dpm_fini(struct radeon_device *rdev);
762 void si_dpm_display_configuration_changed(struct radeon_device *rdev);
763 void si_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
765 int si_dpm_force_performance_level(struct radeon_device *rdev,
767 int si_fan_ctrl_get_fan_speed_percent(struct radeon_device *rdev,
769 int si_fan_ctrl_set_fan_speed_percent(struct radeon_device *rdev,
771 u32 si_fan_ctrl_get_mode(struct radeon_device *rdev);
772 void si_fan_ctrl_set_mode(struct radeon_device *rdev, u32 mode);
773 u32 si_dpm_get_current_sclk(struct radeon_device *rdev);
774 u32 si_dpm_get_current_mclk(struct radeon_device *rdev);
777 void dce8_bandwidth_update(struct radeon_device *rdev);
782 uint64_t cik_get_gpu_clock_counter(struct radeon_device *rdev);
783 u32 cik_get_xclk(struct radeon_device *rdev);
784 uint32_t cik_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
785 void cik_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
786 int cik_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
787 int cik_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk);
788 void cik_sdma_fence_ring_emit(struct radeon_device *rdev,
790 bool cik_sdma_semaphore_ring_emit(struct radeon_device *rdev,
794 void cik_sdma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
795 struct radeon_fence *cik_copy_dma(struct radeon_device *rdev,
799 struct radeon_fence *cik_copy_cpdma(struct radeon_device *rdev,
803 int cik_sdma_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
804 int cik_sdma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
805 bool cik_sdma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
806 void cik_fence_gfx_ring_emit(struct radeon_device *rdev,
808 void cik_fence_compute_ring_emit(struct radeon_device *rdev,
810 bool cik_semaphore_ring_emit(struct radeon_device *rdev,
814 void cik_pcie_gart_tlb_flush(struct radeon_device *rdev);
815 int cik_init(struct radeon_device *rdev);
816 void cik_fini(struct radeon_device *rdev);
817 int cik_suspend(struct radeon_device *rdev);
818 int cik_resume(struct radeon_device *rdev);
819 bool cik_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
820 int cik_asic_reset(struct radeon_device *rdev);
821 void cik_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
822 int cik_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
823 int cik_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
824 int cik_irq_set(struct radeon_device *rdev);
825 int cik_irq_process(struct radeon_device *rdev);
826 int cik_vm_init(struct radeon_device *rdev);
827 void cik_vm_fini(struct radeon_device *rdev);
828 void cik_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
831 void cik_sdma_vm_copy_pages(struct radeon_device *rdev,
835 void cik_sdma_vm_write_pages(struct radeon_device *rdev,
840 void cik_sdma_vm_set_pages(struct radeon_device *rdev,
847 void cik_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
849 int cik_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
850 u32 cik_gfx_get_rptr(struct radeon_device *rdev,
852 u32 cik_gfx_get_wptr(struct radeon_device *rdev,
854 void cik_gfx_set_wptr(struct radeon_device *rdev,
856 u32 cik_compute_get_rptr(struct radeon_device *rdev,
858 u32 cik_compute_get_wptr(struct radeon_device *rdev,
860 void cik_compute_set_wptr(struct radeon_device *rdev,
862 u32 cik_sdma_get_rptr(struct radeon_device *rdev,
864 u32 cik_sdma_get_wptr(struct radeon_device *rdev,
866 void cik_sdma_set_wptr(struct radeon_device *rdev,
868 int ci_get_temp(struct radeon_device *rdev);
869 int kv_get_temp(struct radeon_device *rdev);
870 int cik_get_allowed_info_register(struct radeon_device *rdev,
873 int ci_dpm_init(struct radeon_device *rdev);
874 int ci_dpm_enable(struct radeon_device *rdev);
875 int ci_dpm_late_enable(struct radeon_device *rdev);
876 void ci_dpm_disable(struct radeon_device *rdev);
877 int ci_dpm_pre_set_power_state(struct radeon_device *rdev);
878 int ci_dpm_set_power_state(struct radeon_device *rdev);
879 void ci_dpm_post_set_power_state(struct radeon_device *rdev);
880 void ci_dpm_setup_asic(struct radeon_device *rdev);
881 void ci_dpm_display_configuration_changed(struct radeon_device *rdev);
882 void ci_dpm_fini(struct radeon_device *rdev);
883 u32 ci_dpm_get_sclk(struct radeon_device *rdev, bool low);
884 u32 ci_dpm_get_mclk(struct radeon_device *rdev, bool low);
885 void ci_dpm_print_power_state(struct radeon_device *rdev,
887 void ci_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
889 int ci_dpm_force_performance_level(struct radeon_device *rdev,
891 bool ci_dpm_vblank_too_short(struct radeon_device *rdev);
892 void ci_dpm_powergate_uvd(struct radeon_device *rdev, bool gate);
893 u32 ci_dpm_get_current_sclk(struct radeon_device *rdev);
894 u32 ci_dpm_get_current_mclk(struct radeon_device *rdev);
896 int ci_fan_ctrl_get_fan_speed_percent(struct radeon_device *rdev,
898 int ci_fan_ctrl_set_fan_speed_percent(struct radeon_device *rdev,
900 u32 ci_fan_ctrl_get_mode(struct radeon_device *rdev);
901 void ci_fan_ctrl_set_mode(struct radeon_device *rdev, u32 mode);
903 int kv_dpm_init(struct radeon_device *rdev);
904 int kv_dpm_enable(struct radeon_device *rdev);
905 int kv_dpm_late_enable(struct radeon_device *rdev);
906 void kv_dpm_disable(struct radeon_device *rdev);
907 int kv_dpm_pre_set_power_state(struct radeon_device *rdev);
908 int kv_dpm_set_power_state(struct radeon_device *rdev);
909 void kv_dpm_post_set_power_state(struct radeon_device *rdev);
910 void kv_dpm_setup_asic(struct radeon_device *rdev);
911 void kv_dpm_display_configuration_changed(struct radeon_device *rdev);
912 void kv_dpm_fini(struct radeon_device *rdev);
913 u32 kv_dpm_get_sclk(struct radeon_device *rdev, bool low);
914 u32 kv_dpm_get_mclk(struct radeon_device *rdev, bool low);
915 void kv_dpm_print_power_state(struct radeon_device *rdev,
917 void kv_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
919 int kv_dpm_force_performance_level(struct radeon_device *rdev,
921 void kv_dpm_powergate_uvd(struct radeon_device *rdev, bool gate);
922 void kv_dpm_enable_bapm(struct radeon_device *rdev, bool enable);
923 u32 kv_dpm_get_current_sclk(struct radeon_device *rdev);
924 u32 kv_dpm_get_current_mclk(struct radeon_device *rdev);
927 uint32_t uvd_v1_0_get_rptr(struct radeon_device *rdev,
929 uint32_t uvd_v1_0_get_wptr(struct radeon_device *rdev,
931 void uvd_v1_0_set_wptr(struct radeon_device *rdev,
933 int uvd_v1_0_resume(struct radeon_device *rdev);
935 int uvd_v1_0_init(struct radeon_device *rdev);
936 void uvd_v1_0_fini(struct radeon_device *rdev);
937 int uvd_v1_0_start(struct radeon_device *rdev);
938 void uvd_v1_0_stop(struct radeon_device *rdev);
940 int uvd_v1_0_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
941 void uvd_v1_0_fence_emit(struct radeon_device *rdev,
943 int uvd_v1_0_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
944 bool uvd_v1_0_semaphore_emit(struct radeon_device *rdev,
948 void uvd_v1_0_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
951 int uvd_v2_2_resume(struct radeon_device *rdev);
952 void uvd_v2_2_fence_emit(struct radeon_device *rdev,
954 bool uvd_v2_2_semaphore_emit(struct radeon_device *rdev,
960 bool uvd_v3_1_semaphore_emit(struct radeon_device *rdev,
966 int uvd_v4_2_resume(struct radeon_device *rdev);
969 uint32_t vce_v1_0_get_rptr(struct radeon_device *rdev,
971 uint32_t vce_v1_0_get_wptr(struct radeon_device *rdev,
973 void vce_v1_0_set_wptr(struct radeon_device *rdev,
975 int vce_v1_0_load_fw(struct radeon_device *rdev, uint32_t *data);
976 unsigned vce_v1_0_bo_size(struct radeon_device *rdev);
977 int vce_v1_0_resume(struct radeon_device *rdev);
978 int vce_v1_0_init(struct radeon_device *rdev);
979 int vce_v1_0_start(struct radeon_device *rdev);
982 unsigned vce_v2_0_bo_size(struct radeon_device *rdev);
983 int vce_v2_0_resume(struct radeon_device *rdev);
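The long run of per-ASIC prototypes above (cayman_*, ni_dpm_*, trinity_dpm_*, si_*, cik_*, ci_dpm_*, kv_dpm_*, uvd_*, vce_*) shares one shape: each function takes a struct radeon_device * and implements the same operation for a different chip generation. Later hits in this listing dispatch through pointers such as rdev->asic->dpm.fan_ctrl_set_mode, so the glue is a per-family function-pointer table. Below is a minimal standalone model of that dispatch; the struct layout and names are illustrative, not the real struct radeon_asic.

/* Standalone model of per-ASIC dispatch; compiles with any C compiler.
 * The real driver keeps these pointers in struct radeon_asic. */
#include <stdio.h>

struct radeon_device;                       /* opaque in this sketch */

struct dpm_ops {
        int  (*init)(struct radeon_device *rdev);
        int  (*set_power_state)(struct radeon_device *rdev);
        void (*fini)(struct radeon_device *rdev);
};

/* Dummy stand-ins for real implementations like ni_dpm_init(). */
static int  fake_init(struct radeon_device *rdev)            { (void)rdev; return 0; }
static int  fake_set_power_state(struct radeon_device *rdev) { (void)rdev; return 0; }
static void fake_fini(struct radeon_device *rdev)            { (void)rdev; }

static const struct dpm_ops ni_like_ops = {
        .init            = fake_init,
        .set_power_state = fake_set_power_state,
        .fini            = fake_fini,
};

int main(void)
{
        const struct dpm_ops *ops = &ni_like_ops;   /* chosen per chip family */

        if (ops->init(NULL) == 0 && ops->set_power_state(NULL) == 0)
                printf("dispatched through ops table\n");
        ops->fini(NULL);
        return 0;
}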
H A Dr520.c36 int r520_mc_wait_for_idle(struct radeon_device *rdev) r520_mc_wait_for_idle() argument
41 for (i = 0; i < rdev->usec_timeout; i++) { r520_mc_wait_for_idle()
52 static void r520_gpu_init(struct radeon_device *rdev) r520_gpu_init() argument
56 rv515_vga_render_disable(rdev); r520_gpu_init()
78 if (rdev->family == CHIP_RV530) { r520_gpu_init()
81 r420_pipes_init(rdev); r520_gpu_init()
88 if (r520_mc_wait_for_idle(rdev)) { r520_gpu_init()
94 static void r520_vram_get_type(struct radeon_device *rdev) r520_vram_get_type() argument
98 rdev->mc.vram_width = 128; r520_vram_get_type()
99 rdev->mc.vram_is_ddr = true; r520_vram_get_type()
103 rdev->mc.vram_width = 32; r520_vram_get_type()
106 rdev->mc.vram_width = 64; r520_vram_get_type()
109 rdev->mc.vram_width = 128; r520_vram_get_type()
112 rdev->mc.vram_width = 256; r520_vram_get_type()
115 rdev->mc.vram_width = 128; r520_vram_get_type()
119 rdev->mc.vram_width *= 2; r520_vram_get_type()
122 static void r520_mc_init(struct radeon_device *rdev) r520_mc_init() argument
125 r520_vram_get_type(rdev); r520_mc_init()
126 r100_vram_init_sizes(rdev); r520_mc_init()
127 radeon_vram_location(rdev, &rdev->mc, 0); r520_mc_init()
128 rdev->mc.gtt_base_align = 0; r520_mc_init()
129 if (!(rdev->flags & RADEON_IS_AGP)) r520_mc_init()
130 radeon_gtt_location(rdev, &rdev->mc); r520_mc_init()
131 radeon_update_bandwidth_info(rdev); r520_mc_init()
134 static void r520_mc_program(struct radeon_device *rdev) r520_mc_program() argument
139 rv515_mc_stop(rdev, &save); r520_mc_program()
142 if (r520_mc_wait_for_idle(rdev)) r520_mc_program()
143 dev_warn(rdev->dev, "Wait MC idle timeout before updating MC.\n"); r520_mc_program()
145 WREG32(R_0000F8_CONFIG_MEMSIZE, rdev->mc.real_vram_size); r520_mc_program()
148 S_000004_MC_FB_START(rdev->mc.vram_start >> 16) | r520_mc_program()
149 S_000004_MC_FB_TOP(rdev->mc.vram_end >> 16)); r520_mc_program()
151 S_000134_HDP_FB_START(rdev->mc.vram_start >> 16)); r520_mc_program()
152 if (rdev->flags & RADEON_IS_AGP) { r520_mc_program()
154 S_000005_MC_AGP_START(rdev->mc.gtt_start >> 16) | r520_mc_program()
155 S_000005_MC_AGP_TOP(rdev->mc.gtt_end >> 16)); r520_mc_program()
156 WREG32_MC(R_000006_AGP_BASE, lower_32_bits(rdev->mc.agp_base)); r520_mc_program()
158 S_000007_AGP_BASE_ADDR_2(upper_32_bits(rdev->mc.agp_base))); r520_mc_program()
165 rv515_mc_resume(rdev, &save); r520_mc_program()
168 static int r520_startup(struct radeon_device *rdev) r520_startup() argument
172 r520_mc_program(rdev); r520_startup()
174 rv515_clock_startup(rdev); r520_startup()
176 r520_gpu_init(rdev); r520_startup()
179 if (rdev->flags & RADEON_IS_PCIE) { r520_startup()
180 r = rv370_pcie_gart_enable(rdev); r520_startup()
186 r = radeon_wb_init(rdev); r520_startup()
190 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); r520_startup()
192 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); r520_startup()
197 if (!rdev->irq.installed) { r520_startup()
198 r = radeon_irq_kms_init(rdev); r520_startup()
203 rs600_irq_set(rdev); r520_startup()
204 rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL); r520_startup()
206 r = r100_cp_init(rdev, 1024 * 1024); r520_startup()
208 dev_err(rdev->dev, "failed initializing CP (%d).\n", r); r520_startup()
212 r = radeon_ib_pool_init(rdev); r520_startup()
214 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); r520_startup()
221 int r520_resume(struct radeon_device *rdev) r520_resume() argument
226 if (rdev->flags & RADEON_IS_PCIE) r520_resume()
227 rv370_pcie_gart_disable(rdev); r520_resume()
229 rv515_clock_startup(rdev); r520_resume()
231 if (radeon_asic_reset(rdev)) { r520_resume()
232 dev_warn(rdev->dev, "GPU reset failed ! (0xE40=0x%08X, 0x7C0=0x%08X)\n", r520_resume()
237 atom_asic_init(rdev->mode_info.atom_context); r520_resume()
239 rv515_clock_startup(rdev); r520_resume()
241 radeon_surface_init(rdev); r520_resume()
243 rdev->accel_working = true; r520_resume()
244 r = r520_startup(rdev); r520_resume()
246 rdev->accel_working = false; r520_resume()
251 int r520_init(struct radeon_device *rdev) r520_init() argument
256 radeon_scratch_init(rdev); r520_init()
258 radeon_surface_init(rdev); r520_init()
260 r100_restore_sanity(rdev); r520_init()
263 if (!radeon_get_bios(rdev)) { r520_init()
264 if (ASIC_IS_AVIVO(rdev)) r520_init()
267 if (rdev->is_atom_bios) { r520_init()
268 r = radeon_atombios_init(rdev); r520_init()
272 dev_err(rdev->dev, "Expecting atombios for RV515 GPU\n"); r520_init()
276 if (radeon_asic_reset(rdev)) { r520_init()
277 dev_warn(rdev->dev, r520_init()
283 if (radeon_boot_test_post_card(rdev) == false) r520_init()
286 if (!radeon_card_posted(rdev) && rdev->bios) { r520_init()
288 atom_asic_init(rdev->mode_info.atom_context); r520_init()
291 radeon_get_clock_info(rdev->ddev); r520_init()
293 if (rdev->flags & RADEON_IS_AGP) { r520_init()
294 r = radeon_agp_init(rdev); r520_init()
296 radeon_agp_disable(rdev); r520_init()
300 r520_mc_init(rdev); r520_init()
301 rv515_debugfs(rdev); r520_init()
303 r = radeon_fence_driver_init(rdev); r520_init()
307 r = radeon_bo_init(rdev); r520_init()
310 r = rv370_pcie_gart_init(rdev); r520_init()
313 rv515_set_safe_registers(rdev); r520_init()
316 radeon_pm_init(rdev); r520_init()
318 rdev->accel_working = true; r520_init()
319 r = r520_startup(rdev); r520_init()
322 dev_err(rdev->dev, "Disabling GPU acceleration\n"); r520_init()
323 r100_cp_fini(rdev); r520_init()
324 radeon_wb_fini(rdev); r520_init()
325 radeon_ib_pool_fini(rdev); r520_init()
326 radeon_irq_kms_fini(rdev); r520_init()
327 rv370_pcie_gart_fini(rdev); r520_init()
328 radeon_agp_fini(rdev); r520_init()
329 rdev->accel_working = false; r520_init()
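r520_init() above shows the driver's standard failure handling: when r520_startup() fails, the subsystems brought up so far (CP, writeback, IB pool, IRQs, GART, AGP) are torn down and accel_working is cleared so the device keeps running without acceleration. The kernel file does the teardown in one flat block; the goto-unwind form below is the same idiom in a standalone, compilable sketch with hypothetical subsystem names.

/* Standalone sketch of the init-then-unwind idiom behind r520_init();
 * subsystem names are hypothetical. */
#include <stdio.h>

static int  cp_init(void)  { return 0; }
static void cp_fini(void)  { puts("cp_fini"); }
static int  wb_init(void)  { return 0; }
static void wb_fini(void)  { puts("wb_fini"); }
static int  ib_init(void)  { return -1; }   /* simulate a late failure */

static int device_init(void)
{
        int r;

        r = cp_init();
        if (r)
                return r;
        r = wb_init();
        if (r)
                goto err_cp;
        r = ib_init();
        if (r)
                goto err_wb;
        return 0;

err_wb:                         /* unwind in reverse order of bring-up */
        wb_fini();
err_cp:
        cp_fini();
        return r;               /* caller clears its accel_working equivalent */
}

int main(void)
{
        printf("device_init: %d\n", device_init());
        return 0;
}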
H A Dr420.c39 void r420_pm_init_profile(struct radeon_device *rdev) r420_pm_init_profile() argument
42 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; r420_pm_init_profile()
43 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r420_pm_init_profile()
44 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0; r420_pm_init_profile()
45 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0; r420_pm_init_profile()
47 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 0; r420_pm_init_profile()
48 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0; r420_pm_init_profile()
49 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; r420_pm_init_profile()
50 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; r420_pm_init_profile()
52 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 0; r420_pm_init_profile()
53 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1; r420_pm_init_profile()
54 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0; r420_pm_init_profile()
55 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0; r420_pm_init_profile()
57 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 0; r420_pm_init_profile()
58 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r420_pm_init_profile()
59 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0; r420_pm_init_profile()
60 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0; r420_pm_init_profile()
62 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 0; r420_pm_init_profile()
63 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r420_pm_init_profile()
64 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; r420_pm_init_profile()
65 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; r420_pm_init_profile()
67 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 0; r420_pm_init_profile()
68 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r420_pm_init_profile()
69 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0; r420_pm_init_profile()
70 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0; r420_pm_init_profile()
72 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 0; r420_pm_init_profile()
73 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r420_pm_init_profile()
74 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0; r420_pm_init_profile()
75 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0; r420_pm_init_profile()
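r420_pm_init_profile() above fills seven profile slots with near-identical assignments; only dpms_on_ps_idx varies (0, 1, or the default power state index) and only the default profile's off index differs from 0. A table-driven rewrite, sketched standalone with simplified types, makes that pattern explicit (this is a refactoring sketch, not the driver's code):

/* Standalone sketch: table-driven version of the repetitive assignments
 * in r420_pm_init_profile(). Types are simplified. */
#include <stdio.h>

enum { PM_PROFILE_DEFAULT_IDX, PM_PROFILE_LOW_SH_IDX, PM_PROFILE_MID_SH_IDX,
       PM_PROFILE_HIGH_SH_IDX, PM_PROFILE_LOW_MH_IDX, PM_PROFILE_MID_MH_IDX,
       PM_PROFILE_HIGH_MH_IDX, PM_PROFILE_MAX };

struct pm_profile {
        int dpms_off_ps_idx, dpms_on_ps_idx;
        int dpms_off_cm_idx, dpms_on_cm_idx;
};

int main(void)
{
        int default_ps = 3;     /* stands in for default_power_state_index */
        struct pm_profile profiles[PM_PROFILE_MAX];
        /* dpms_on_ps_idx per slot, copied from the assignments above;
         * every other field is 0 except the default profile's off index. */
        int on_idx[PM_PROFILE_MAX] = {
                default_ps, 0, 1, default_ps, default_ps, default_ps, default_ps,
        };

        for (int i = 0; i < PM_PROFILE_MAX; i++)
                profiles[i] = (struct pm_profile){ 0, on_idx[i], 0, 0 };
        profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = default_ps;

        printf("MID_SH on idx = %d\n",
               profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx);
        return 0;
}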
78 static void r420_set_reg_safe(struct radeon_device *rdev) r420_set_reg_safe() argument
80 rdev->config.r300.reg_safe_bm = r420_reg_safe_bm; r420_set_reg_safe()
81 rdev->config.r300.reg_safe_bm_size = ARRAY_SIZE(r420_reg_safe_bm); r420_set_reg_safe()
84 void r420_pipes_init(struct radeon_device *rdev) r420_pipes_init() argument
94 if (r100_gui_wait_for_idle(rdev)) { r420_pipes_init()
103 if ((rdev->pdev->device == 0x5e4c) || r420_pipes_init()
104 (rdev->pdev->device == 0x5e4f)) r420_pipes_init()
107 rdev->num_gb_pipes = num_pipes; r420_pipes_init()
130 if (r100_gui_wait_for_idle(rdev)) { r420_pipes_init()
143 if (r100_gui_wait_for_idle(rdev)) { r420_pipes_init()
148 if (rdev->family == CHIP_RV530) { r420_pipes_init()
151 rdev->num_z_pipes = 2; r420_pipes_init()
153 rdev->num_z_pipes = 1; r420_pipes_init()
155 rdev->num_z_pipes = 1; r420_pipes_init()
158 rdev->num_gb_pipes, rdev->num_z_pipes); r420_pipes_init()
161 u32 r420_mc_rreg(struct radeon_device *rdev, u32 reg) r420_mc_rreg() argument
166 spin_lock_irqsave(&rdev->mc_idx_lock, flags); r420_mc_rreg()
169 spin_unlock_irqrestore(&rdev->mc_idx_lock, flags); r420_mc_rreg()
173 void r420_mc_wreg(struct radeon_device *rdev, u32 reg, u32 v) r420_mc_wreg() argument
177 spin_lock_irqsave(&rdev->mc_idx_lock, flags); r420_mc_wreg()
181 spin_unlock_irqrestore(&rdev->mc_idx_lock, flags); r420_mc_wreg()
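r420_mc_rreg()/r420_mc_wreg() above are the classic index/data register pair: write the target register number into an index register, then read or write a data register, with mc_idx_lock held across both MMIO accesses so concurrent callers cannot interleave and clobber the index. A standalone model of the pattern, with a pthread mutex standing in for spin_lock_irqsave():

/* Standalone model of indexed (index/data pair) register access as in
 * r420_mc_rreg()/r420_mc_wreg(); build with -lpthread. */
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t fake_regs[256];         /* stands in for MMIO space */
static uint32_t mc_index;               /* the "index" register */
static pthread_mutex_t mc_idx_lock = PTHREAD_MUTEX_INITIALIZER;

static uint32_t mc_rreg(uint32_t reg)
{
        uint32_t v;

        pthread_mutex_lock(&mc_idx_lock);
        mc_index = reg;                 /* like WREG32(MC index reg) */
        v = fake_regs[mc_index];        /* like RREG32(MC data reg) */
        pthread_mutex_unlock(&mc_idx_lock);
        return v;
}

static void mc_wreg(uint32_t reg, uint32_t v)
{
        pthread_mutex_lock(&mc_idx_lock);
        mc_index = reg;                 /* index selects the target */
        fake_regs[mc_index] = v;        /* data write lands on it */
        pthread_mutex_unlock(&mc_idx_lock);
}

int main(void)
{
        mc_wreg(7, 0xdeadbeef);
        printf("reg7 = 0x%08x\n", mc_rreg(7));
        return 0;
}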
184 static void r420_debugfs(struct radeon_device *rdev) r420_debugfs() argument
186 if (r100_debugfs_rbbm_init(rdev)) { r420_debugfs()
189 if (r420_debugfs_pipes_info_init(rdev)) { r420_debugfs()
194 static void r420_clock_resume(struct radeon_device *rdev) r420_clock_resume() argument
199 radeon_atom_set_clock_gating(rdev, 1); r420_clock_resume()
202 if (rdev->family == CHIP_R420) r420_clock_resume()
207 static void r420_cp_errata_init(struct radeon_device *rdev) r420_cp_errata_init() argument
209 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; r420_cp_errata_init()
217 radeon_scratch_get(rdev, &rdev->config.r300.resync_scratch); r420_cp_errata_init()
218 radeon_ring_lock(rdev, ring, 8); r420_cp_errata_init()
220 radeon_ring_write(ring, rdev->config.r300.resync_scratch); r420_cp_errata_init()
222 radeon_ring_unlock_commit(rdev, ring, false); r420_cp_errata_init()
225 static void r420_cp_errata_fini(struct radeon_device *rdev) r420_cp_errata_fini() argument
227 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; r420_cp_errata_fini()
232 radeon_ring_lock(rdev, ring, 8); r420_cp_errata_fini()
235 radeon_ring_unlock_commit(rdev, ring, false); r420_cp_errata_fini()
236 radeon_scratch_free(rdev, rdev->config.r300.resync_scratch); r420_cp_errata_fini()
239 static int r420_startup(struct radeon_device *rdev) r420_startup() argument
244 r100_set_common_regs(rdev); r420_startup()
246 r300_mc_program(rdev); r420_startup()
248 r420_clock_resume(rdev); r420_startup()
251 if (rdev->flags & RADEON_IS_PCIE) { r420_startup()
252 r = rv370_pcie_gart_enable(rdev); r420_startup()
256 if (rdev->flags & RADEON_IS_PCI) { r420_startup()
257 r = r100_pci_gart_enable(rdev); r420_startup()
261 r420_pipes_init(rdev); r420_startup()
264 r = radeon_wb_init(rdev); r420_startup()
268 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); r420_startup()
270 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); r420_startup()
275 if (!rdev->irq.installed) { r420_startup()
276 r = radeon_irq_kms_init(rdev); r420_startup()
281 r100_irq_set(rdev); r420_startup()
282 rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL); r420_startup()
284 r = r100_cp_init(rdev, 1024 * 1024); r420_startup()
286 dev_err(rdev->dev, "failed initializing CP (%d).\n", r); r420_startup()
289 r420_cp_errata_init(rdev); r420_startup()
291 r = radeon_ib_pool_init(rdev); r420_startup()
293 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); r420_startup()
300 int r420_resume(struct radeon_device *rdev) r420_resume() argument
305 if (rdev->flags & RADEON_IS_PCIE) r420_resume()
306 rv370_pcie_gart_disable(rdev); r420_resume()
307 if (rdev->flags & RADEON_IS_PCI) r420_resume()
308 r100_pci_gart_disable(rdev); r420_resume()
310 r420_clock_resume(rdev); r420_resume()
312 if (radeon_asic_reset(rdev)) { r420_resume()
313 dev_warn(rdev->dev, "GPU reset failed ! (0xE40=0x%08X, 0x7C0=0x%08X)\n", r420_resume()
318 if (rdev->is_atom_bios) { r420_resume()
319 atom_asic_init(rdev->mode_info.atom_context); r420_resume()
321 radeon_combios_asic_init(rdev->ddev); r420_resume()
324 r420_clock_resume(rdev); r420_resume()
326 radeon_surface_init(rdev); r420_resume()
328 rdev->accel_working = true; r420_resume()
329 r = r420_startup(rdev); r420_resume()
331 rdev->accel_working = false; r420_resume()
336 int r420_suspend(struct radeon_device *rdev) r420_suspend() argument
338 radeon_pm_suspend(rdev); r420_suspend()
339 r420_cp_errata_fini(rdev); r420_suspend()
340 r100_cp_disable(rdev); r420_suspend()
341 radeon_wb_disable(rdev); r420_suspend()
342 r100_irq_disable(rdev); r420_suspend()
343 if (rdev->flags & RADEON_IS_PCIE) r420_suspend()
344 rv370_pcie_gart_disable(rdev); r420_suspend()
345 if (rdev->flags & RADEON_IS_PCI) r420_suspend()
346 r100_pci_gart_disable(rdev); r420_suspend()
350 void r420_fini(struct radeon_device *rdev) r420_fini() argument
352 radeon_pm_fini(rdev); r420_fini()
353 r100_cp_fini(rdev); r420_fini()
354 radeon_wb_fini(rdev); r420_fini()
355 radeon_ib_pool_fini(rdev); r420_fini()
356 radeon_gem_fini(rdev); r420_fini()
357 if (rdev->flags & RADEON_IS_PCIE) r420_fini()
358 rv370_pcie_gart_fini(rdev); r420_fini()
359 if (rdev->flags & RADEON_IS_PCI) r420_fini()
360 r100_pci_gart_fini(rdev); r420_fini()
361 radeon_agp_fini(rdev); r420_fini()
362 radeon_irq_kms_fini(rdev); r420_fini()
363 radeon_fence_driver_fini(rdev); r420_fini()
364 radeon_bo_fini(rdev); r420_fini()
365 if (rdev->is_atom_bios) { r420_fini()
366 radeon_atombios_fini(rdev); r420_fini()
368 radeon_combios_fini(rdev); r420_fini()
370 kfree(rdev->bios); r420_fini()
371 rdev->bios = NULL; r420_fini()
374 int r420_init(struct radeon_device *rdev) r420_init() argument
379 radeon_scratch_init(rdev); r420_init()
381 radeon_surface_init(rdev); r420_init()
384 r100_restore_sanity(rdev); r420_init()
386 if (!radeon_get_bios(rdev)) { r420_init()
387 if (ASIC_IS_AVIVO(rdev)) r420_init()
390 if (rdev->is_atom_bios) { r420_init()
391 r = radeon_atombios_init(rdev); r420_init()
396 r = radeon_combios_init(rdev); r420_init()
402 if (radeon_asic_reset(rdev)) { r420_init()
403 dev_warn(rdev->dev, r420_init()
409 if (radeon_boot_test_post_card(rdev) == false) r420_init()
413 radeon_get_clock_info(rdev->ddev); r420_init()
415 if (rdev->flags & RADEON_IS_AGP) { r420_init()
416 r = radeon_agp_init(rdev); r420_init()
418 radeon_agp_disable(rdev); r420_init()
422 r300_mc_init(rdev); r420_init()
423 r420_debugfs(rdev); r420_init()
425 r = radeon_fence_driver_init(rdev); r420_init()
430 r = radeon_bo_init(rdev); r420_init()
434 if (rdev->family == CHIP_R420) r420_init()
435 r100_enable_bm(rdev); r420_init()
437 if (rdev->flags & RADEON_IS_PCIE) { r420_init()
438 r = rv370_pcie_gart_init(rdev); r420_init()
442 if (rdev->flags & RADEON_IS_PCI) { r420_init()
443 r = r100_pci_gart_init(rdev); r420_init()
447 r420_set_reg_safe(rdev); r420_init()
450 radeon_pm_init(rdev); r420_init()
452 rdev->accel_working = true; r420_init()
453 r = r420_startup(rdev); r420_init()
456 dev_err(rdev->dev, "Disabling GPU acceleration\n"); r420_init()
457 r100_cp_fini(rdev); r420_init()
458 radeon_wb_fini(rdev); r420_init()
459 radeon_ib_pool_fini(rdev); r420_init()
460 radeon_irq_kms_fini(rdev); r420_init()
461 if (rdev->flags & RADEON_IS_PCIE) r420_init()
462 rv370_pcie_gart_fini(rdev); r420_init()
463 if (rdev->flags & RADEON_IS_PCI) r420_init()
464 r100_pci_gart_fini(rdev); r420_init()
465 radeon_agp_fini(rdev); r420_init()
466 rdev->accel_working = false; r420_init()
479 struct radeon_device *rdev = dev->dev_private; r420_debugfs_pipes_info() local
496 int r420_debugfs_pipes_info_init(struct radeon_device *rdev) r420_debugfs_pipes_info_init() argument
499 return radeon_debugfs_add_files(rdev, r420_pipes_info_list, 1); r420_debugfs_pipes_info_init()
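r420_fini() in the excerpt above ends with kfree(rdev->bios); rdev->bios = NULL;, the usual free-then-NULL tail that makes a repeated fini harmless because kfree(NULL) is a no-op. A tiny standalone illustration of why the NULL assignment matters:

/* Standalone illustration of the free-then-NULL tail seen in r420_fini(). */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct dev_state {
        char *bios;
};

static void dev_fini(struct dev_state *d)
{
        free(d->bios);          /* free(NULL) is a no-op, like kfree(NULL) */
        d->bios = NULL;         /* makes a second dev_fini() call safe */
}

int main(void)
{
        struct dev_state d = { .bios = strdup("bios image") };

        dev_fini(&d);
        dev_fini(&d);           /* harmless: bios is already NULL */
        printf("bios = %p\n", (void *)d.bios);
        return 0;
}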
H A Dradeon_pm.c45 static int radeon_debugfs_pm_init(struct radeon_device *rdev);
46 static bool radeon_pm_in_vbl(struct radeon_device *rdev);
47 static bool radeon_pm_debug_check_in_vbl(struct radeon_device *rdev, bool finish);
48 static void radeon_pm_update_profile(struct radeon_device *rdev);
49 static void radeon_pm_set_clocks(struct radeon_device *rdev);
51 int radeon_pm_get_type_index(struct radeon_device *rdev, radeon_pm_get_type_index() argument
58 for (i = 0; i < rdev->pm.num_power_states; i++) { radeon_pm_get_type_index()
59 if (rdev->pm.power_state[i].type == ps_type) { radeon_pm_get_type_index()
66 return rdev->pm.default_power_state_index; radeon_pm_get_type_index()
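radeon_pm_get_type_index() above scans the power-state array for a state of the requested type and falls back to default_power_state_index when nothing matches, so callers always receive a usable index. A standalone sketch of that search-with-fallback (the instance handling in the full function is simplified away here):

/* Standalone sketch of the search-with-fallback in
 * radeon_pm_get_type_index(); types are simplified. */
#include <stdio.h>

enum ps_type { PS_BATTERY, PS_BALANCED, PS_PERFORMANCE };

struct power_state { enum ps_type type; };

static int get_type_index(const struct power_state *ps, int n,
                          enum ps_type want, int default_idx)
{
        for (int i = 0; i < n; i++) {
                if (ps[i].type == want)
                        return i;       /* first state of the wanted type */
        }
        return default_idx;             /* always a usable index */
}

int main(void)
{
        struct power_state states[] = {
                { PS_BATTERY }, { PS_BALANCED }, { PS_PERFORMANCE },
        };

        printf("perf idx = %d\n",
               get_type_index(states, 3, PS_PERFORMANCE, 1));
        return 0;
}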
69 void radeon_pm_acpi_event_handler(struct radeon_device *rdev) radeon_pm_acpi_event_handler() argument
71 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { radeon_pm_acpi_event_handler()
72 mutex_lock(&rdev->pm.mutex); radeon_pm_acpi_event_handler()
74 rdev->pm.dpm.ac_power = true; radeon_pm_acpi_event_handler()
76 rdev->pm.dpm.ac_power = false; radeon_pm_acpi_event_handler()
77 if (rdev->family == CHIP_ARUBA) { radeon_pm_acpi_event_handler()
78 if (rdev->asic->dpm.enable_bapm) radeon_pm_acpi_event_handler()
79 radeon_dpm_enable_bapm(rdev, rdev->pm.dpm.ac_power); radeon_pm_acpi_event_handler()
81 mutex_unlock(&rdev->pm.mutex); radeon_pm_acpi_event_handler()
82 } else if (rdev->pm.pm_method == PM_METHOD_PROFILE) { radeon_pm_acpi_event_handler()
83 if (rdev->pm.profile == PM_PROFILE_AUTO) { radeon_pm_acpi_event_handler()
84 mutex_lock(&rdev->pm.mutex); radeon_pm_acpi_event_handler()
85 radeon_pm_update_profile(rdev); radeon_pm_acpi_event_handler()
86 radeon_pm_set_clocks(rdev); radeon_pm_acpi_event_handler()
87 mutex_unlock(&rdev->pm.mutex); radeon_pm_acpi_event_handler()
92 static void radeon_pm_update_profile(struct radeon_device *rdev) radeon_pm_update_profile() argument
94 switch (rdev->pm.profile) { radeon_pm_update_profile()
96 rdev->pm.profile_index = PM_PROFILE_DEFAULT_IDX; radeon_pm_update_profile()
100 if (rdev->pm.active_crtc_count > 1) radeon_pm_update_profile()
101 rdev->pm.profile_index = PM_PROFILE_HIGH_MH_IDX; radeon_pm_update_profile()
103 rdev->pm.profile_index = PM_PROFILE_HIGH_SH_IDX; radeon_pm_update_profile()
105 if (rdev->pm.active_crtc_count > 1) radeon_pm_update_profile()
106 rdev->pm.profile_index = PM_PROFILE_MID_MH_IDX; radeon_pm_update_profile()
108 rdev->pm.profile_index = PM_PROFILE_MID_SH_IDX; radeon_pm_update_profile()
112 if (rdev->pm.active_crtc_count > 1) radeon_pm_update_profile()
113 rdev->pm.profile_index = PM_PROFILE_LOW_MH_IDX; radeon_pm_update_profile()
115 rdev->pm.profile_index = PM_PROFILE_LOW_SH_IDX; radeon_pm_update_profile()
118 if (rdev->pm.active_crtc_count > 1) radeon_pm_update_profile()
119 rdev->pm.profile_index = PM_PROFILE_MID_MH_IDX; radeon_pm_update_profile()
121 rdev->pm.profile_index = PM_PROFILE_MID_SH_IDX; radeon_pm_update_profile()
124 if (rdev->pm.active_crtc_count > 1) radeon_pm_update_profile()
125 rdev->pm.profile_index = PM_PROFILE_HIGH_MH_IDX; radeon_pm_update_profile()
127 rdev->pm.profile_index = PM_PROFILE_HIGH_SH_IDX; radeon_pm_update_profile()
131 if (rdev->pm.active_crtc_count == 0) { radeon_pm_update_profile()
132 rdev->pm.requested_power_state_index = radeon_pm_update_profile()
133 rdev->pm.profiles[rdev->pm.profile_index].dpms_off_ps_idx; radeon_pm_update_profile()
134 rdev->pm.requested_clock_mode_index = radeon_pm_update_profile()
135 rdev->pm.profiles[rdev->pm.profile_index].dpms_off_cm_idx; radeon_pm_update_profile()
137 rdev->pm.requested_power_state_index = radeon_pm_update_profile()
138 rdev->pm.profiles[rdev->pm.profile_index].dpms_on_ps_idx; radeon_pm_update_profile()
139 rdev->pm.requested_clock_mode_index = radeon_pm_update_profile()
140 rdev->pm.profiles[rdev->pm.profile_index].dpms_on_cm_idx; radeon_pm_update_profile()
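radeon_pm_update_profile() above maps the user-selected profile plus the active CRTC count onto a concrete slot: multi-head (_MH_IDX) variants when more than one CRTC is lit, single-head (_SH_IDX) otherwise, with the AUTO branch choosing HIGH or MID depending on a condition elided by the search snippet (in the kernel it is whether the system is on AC power). A standalone sketch of the AUTO selection, where on_ac stands in for that test:

/* Standalone sketch of the AUTO-profile selection in
 * radeon_pm_update_profile(); on_ac stands in for the elided AC test. */
#include <stdbool.h>
#include <stdio.h>

enum { LOW_SH, LOW_MH, MID_SH, MID_MH, HIGH_SH, HIGH_MH };

static int pick_profile_auto(bool on_ac, int active_crtc_count)
{
        bool multi = active_crtc_count > 1;     /* multi-head vs single-head */

        if (on_ac)
                return multi ? HIGH_MH : HIGH_SH;
        return multi ? MID_MH : MID_SH;         /* throttle on battery */
}

int main(void)
{
        printf("%d\n", pick_profile_auto(true, 2));   /* HIGH_MH */
        printf("%d\n", pick_profile_auto(false, 1));  /* MID_SH */
        return 0;
}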
144 static void radeon_unmap_vram_bos(struct radeon_device *rdev) radeon_unmap_vram_bos() argument
148 if (list_empty(&rdev->gem.objects)) radeon_unmap_vram_bos()
151 list_for_each_entry_safe(bo, n, &rdev->gem.objects, list) { radeon_unmap_vram_bos()
157 static void radeon_sync_with_vblank(struct radeon_device *rdev) radeon_sync_with_vblank() argument
159 if (rdev->pm.active_crtcs) { radeon_sync_with_vblank()
160 rdev->pm.vblank_sync = false; radeon_sync_with_vblank()
162 rdev->irq.vblank_queue, rdev->pm.vblank_sync, radeon_sync_with_vblank()
167 static void radeon_set_power_state(struct radeon_device *rdev) radeon_set_power_state() argument
172 if ((rdev->pm.requested_clock_mode_index == rdev->pm.current_clock_mode_index) && radeon_set_power_state()
173 (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index)) radeon_set_power_state()
176 if (radeon_gui_idle(rdev)) { radeon_set_power_state()
177 sclk = rdev->pm.power_state[rdev->pm.requested_power_state_index]. radeon_set_power_state()
178 clock_info[rdev->pm.requested_clock_mode_index].sclk; radeon_set_power_state()
179 if (sclk > rdev->pm.default_sclk) radeon_set_power_state()
180 sclk = rdev->pm.default_sclk; radeon_set_power_state()
186 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) && radeon_set_power_state()
187 (rdev->family >= CHIP_BARTS) && radeon_set_power_state()
188 rdev->pm.active_crtc_count && radeon_set_power_state()
189 ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) || radeon_set_power_state()
190 (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX))) radeon_set_power_state()
191 mclk = rdev->pm.power_state[rdev->pm.requested_power_state_index]. radeon_set_power_state()
192 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].mclk; radeon_set_power_state()
194 mclk = rdev->pm.power_state[rdev->pm.requested_power_state_index]. radeon_set_power_state()
195 clock_info[rdev->pm.requested_clock_mode_index].mclk; radeon_set_power_state()
197 if (mclk > rdev->pm.default_mclk) radeon_set_power_state()
198 mclk = rdev->pm.default_mclk; radeon_set_power_state()
201 if (sclk < rdev->pm.current_sclk) radeon_set_power_state()
204 radeon_sync_with_vblank(rdev); radeon_set_power_state()
206 if (rdev->pm.pm_method == PM_METHOD_DYNPM) { radeon_set_power_state()
207 if (!radeon_pm_in_vbl(rdev)) radeon_set_power_state()
211 radeon_pm_prepare(rdev); radeon_set_power_state()
215 radeon_pm_misc(rdev); radeon_set_power_state()
218 if (sclk != rdev->pm.current_sclk) { radeon_set_power_state()
219 radeon_pm_debug_check_in_vbl(rdev, false); radeon_set_power_state()
220 radeon_set_engine_clock(rdev, sclk); radeon_set_power_state()
221 radeon_pm_debug_check_in_vbl(rdev, true); radeon_set_power_state()
222 rdev->pm.current_sclk = sclk; radeon_set_power_state()
227 if (rdev->asic->pm.set_memory_clock && (mclk != rdev->pm.current_mclk)) { radeon_set_power_state()
228 radeon_pm_debug_check_in_vbl(rdev, false); radeon_set_power_state()
229 radeon_set_memory_clock(rdev, mclk); radeon_set_power_state()
230 radeon_pm_debug_check_in_vbl(rdev, true); radeon_set_power_state()
231 rdev->pm.current_mclk = mclk; radeon_set_power_state()
237 radeon_pm_misc(rdev); radeon_set_power_state()
239 radeon_pm_finish(rdev); radeon_set_power_state()
241 rdev->pm.current_power_state_index = rdev->pm.requested_power_state_index; radeon_set_power_state()
242 rdev->pm.current_clock_mode_index = rdev->pm.requested_clock_mode_index; radeon_set_power_state()
247 static void radeon_pm_set_clocks(struct radeon_device *rdev) radeon_pm_set_clocks() argument
252 if ((rdev->pm.requested_clock_mode_index == rdev->pm.current_clock_mode_index) && radeon_pm_set_clocks()
253 (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index)) radeon_pm_set_clocks()
256 down_write(&rdev->pm.mclk_lock); radeon_pm_set_clocks()
257 mutex_lock(&rdev->ring_lock); radeon_pm_set_clocks()
261 struct radeon_ring *ring = &rdev->ring[i]; radeon_pm_set_clocks()
265 r = radeon_fence_wait_empty(rdev, i); radeon_pm_set_clocks()
268 mutex_unlock(&rdev->ring_lock); radeon_pm_set_clocks()
269 up_write(&rdev->pm.mclk_lock); radeon_pm_set_clocks()
274 radeon_unmap_vram_bos(rdev); radeon_pm_set_clocks()
276 if (rdev->irq.installed) { radeon_pm_set_clocks()
277 for (i = 0; i < rdev->num_crtc; i++) { radeon_pm_set_clocks()
278 if (rdev->pm.active_crtcs & (1 << i)) { radeon_pm_set_clocks()
279 rdev->pm.req_vblank |= (1 << i); radeon_pm_set_clocks()
280 drm_vblank_get(rdev->ddev, i); radeon_pm_set_clocks()
285 radeon_set_power_state(rdev); radeon_pm_set_clocks()
287 if (rdev->irq.installed) { radeon_pm_set_clocks()
288 for (i = 0; i < rdev->num_crtc; i++) { radeon_pm_set_clocks()
289 if (rdev->pm.req_vblank & (1 << i)) { radeon_pm_set_clocks()
290 rdev->pm.req_vblank &= ~(1 << i); radeon_pm_set_clocks()
291 drm_vblank_put(rdev->ddev, i); radeon_pm_set_clocks()
297 radeon_update_bandwidth_info(rdev); radeon_pm_set_clocks()
298 if (rdev->pm.active_crtc_count) radeon_pm_set_clocks()
299 radeon_bandwidth_update(rdev); radeon_pm_set_clocks()
301 rdev->pm.dynpm_planned_action = DYNPM_ACTION_NONE; radeon_pm_set_clocks()
303 mutex_unlock(&rdev->ring_lock); radeon_pm_set_clocks()
304 up_write(&rdev->pm.mclk_lock); radeon_pm_set_clocks()
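radeon_pm_set_clocks() above fixes the ordering the driver must respect before reprogramming clocks: take the mclk rwsem for writing and the ring mutex, wait every ring's fences empty, unmap VRAM BOs, hold vblank references across the actual state change, then release in reverse order. A standalone outline of that lock-then-quiesce-then-reclock shape; all helpers here are hypothetical stubs:

/* Standalone outline of the quiesce-and-reclock ordering in
 * radeon_pm_set_clocks(); build with -lpthread. */
#include <pthread.h>
#include <stdio.h>

static pthread_rwlock_t mclk_lock = PTHREAD_RWLOCK_INITIALIZER;
static pthread_mutex_t ring_lock = PTHREAD_MUTEX_INITIALIZER;

static void wait_rings_idle(void) { puts("fences drained"); }
static void unmap_vram_bos(void)  { puts("vram mappings dropped"); }
static void set_power_state(void) { puts("clocks reprogrammed"); }

static void pm_set_clocks(void)
{
        pthread_rwlock_wrlock(&mclk_lock);      /* down_write(&pm.mclk_lock) */
        pthread_mutex_lock(&ring_lock);         /* mutex_lock(&ring_lock) */

        wait_rings_idle();      /* radeon_fence_wait_empty() per ring */
        unmap_vram_bos();       /* nothing may touch VRAM mid-reclock */
        set_power_state();      /* the driver also holds vblank refs here */

        pthread_mutex_unlock(&ring_lock);       /* release in reverse order */
        pthread_rwlock_unlock(&mclk_lock);
}

int main(void)
{
        pm_set_clocks();
        return 0;
}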
307 static void radeon_pm_print_states(struct radeon_device *rdev) radeon_pm_print_states() argument
313 DRM_DEBUG_DRIVER("%d Power State(s)\n", rdev->pm.num_power_states); radeon_pm_print_states()
314 for (i = 0; i < rdev->pm.num_power_states; i++) { radeon_pm_print_states()
315 power_state = &rdev->pm.power_state[i]; radeon_pm_print_states()
318 if (i == rdev->pm.default_power_state_index) radeon_pm_print_states()
320 if ((rdev->flags & RADEON_IS_PCIE) && !(rdev->flags & RADEON_IS_IGP)) radeon_pm_print_states()
327 if (rdev->flags & RADEON_IS_IGP) radeon_pm_print_states()
346 struct radeon_device *rdev = ddev->dev_private; radeon_get_pm_profile() local
347 int cp = rdev->pm.profile; radeon_get_pm_profile()
362 struct radeon_device *rdev = ddev->dev_private; radeon_set_pm_profile() local
365 if ((rdev->flags & RADEON_IS_PX) && radeon_set_pm_profile()
369 mutex_lock(&rdev->pm.mutex); radeon_set_pm_profile()
370 if (rdev->pm.pm_method == PM_METHOD_PROFILE) { radeon_set_pm_profile()
372 rdev->pm.profile = PM_PROFILE_DEFAULT; radeon_set_pm_profile()
374 rdev->pm.profile = PM_PROFILE_AUTO; radeon_set_pm_profile()
376 rdev->pm.profile = PM_PROFILE_LOW; radeon_set_pm_profile()
378 rdev->pm.profile = PM_PROFILE_MID; radeon_set_pm_profile()
380 rdev->pm.profile = PM_PROFILE_HIGH; radeon_set_pm_profile()
385 radeon_pm_update_profile(rdev); radeon_set_pm_profile()
386 radeon_pm_set_clocks(rdev); radeon_set_pm_profile()
391 mutex_unlock(&rdev->pm.mutex); radeon_set_pm_profile()
401 struct radeon_device *rdev = ddev->dev_private; radeon_get_pm_method() local
402 int pm = rdev->pm.pm_method; radeon_get_pm_method()
415 struct radeon_device *rdev = ddev->dev_private; radeon_set_pm_method() local
418 if ((rdev->flags & RADEON_IS_PX) && radeon_set_pm_method()
425 if (rdev->pm.pm_method == PM_METHOD_DPM) { radeon_set_pm_method()
431 mutex_lock(&rdev->pm.mutex); radeon_set_pm_method()
432 rdev->pm.pm_method = PM_METHOD_DYNPM; radeon_set_pm_method()
433 rdev->pm.dynpm_state = DYNPM_STATE_PAUSED; radeon_set_pm_method()
434 rdev->pm.dynpm_planned_action = DYNPM_ACTION_DEFAULT; radeon_set_pm_method()
435 mutex_unlock(&rdev->pm.mutex); radeon_set_pm_method()
437 mutex_lock(&rdev->pm.mutex); radeon_set_pm_method()
439 rdev->pm.dynpm_state = DYNPM_STATE_DISABLED; radeon_set_pm_method()
440 rdev->pm.dynpm_planned_action = DYNPM_ACTION_NONE; radeon_set_pm_method()
441 rdev->pm.pm_method = PM_METHOD_PROFILE; radeon_set_pm_method()
442 mutex_unlock(&rdev->pm.mutex); radeon_set_pm_method()
443 cancel_delayed_work_sync(&rdev->pm.dynpm_idle_work); radeon_set_pm_method()
448 radeon_pm_compute_clocks(rdev); radeon_set_pm_method()
458 struct radeon_device *rdev = ddev->dev_private; radeon_get_dpm_state() local
459 enum radeon_pm_state_type pm = rdev->pm.dpm.user_state; radeon_get_dpm_state()
472 struct radeon_device *rdev = ddev->dev_private; radeon_set_dpm_state() local
474 mutex_lock(&rdev->pm.mutex); radeon_set_dpm_state()
476 rdev->pm.dpm.user_state = POWER_STATE_TYPE_BATTERY; radeon_set_dpm_state()
478 rdev->pm.dpm.user_state = POWER_STATE_TYPE_BALANCED; radeon_set_dpm_state()
480 rdev->pm.dpm.user_state = POWER_STATE_TYPE_PERFORMANCE; radeon_set_dpm_state()
482 mutex_unlock(&rdev->pm.mutex); radeon_set_dpm_state()
486 mutex_unlock(&rdev->pm.mutex); radeon_set_dpm_state()
489 if (!(rdev->flags & RADEON_IS_PX) || radeon_set_dpm_state()
491 radeon_pm_compute_clocks(rdev); radeon_set_dpm_state()
502 struct radeon_device *rdev = ddev->dev_private; radeon_get_dpm_forced_performance_level() local
503 enum radeon_dpm_forced_level level = rdev->pm.dpm.forced_level; radeon_get_dpm_forced_performance_level()
505 if ((rdev->flags & RADEON_IS_PX) && radeon_get_dpm_forced_performance_level()
520 struct radeon_device *rdev = ddev->dev_private; radeon_set_dpm_forced_performance_level() local
525 if ((rdev->flags & RADEON_IS_PX) && radeon_set_dpm_forced_performance_level()
529 mutex_lock(&rdev->pm.mutex); radeon_set_dpm_forced_performance_level()
540 if (rdev->asic->dpm.force_performance_level) { radeon_set_dpm_forced_performance_level()
541 if (rdev->pm.dpm.thermal_active) { radeon_set_dpm_forced_performance_level()
545 ret = radeon_dpm_force_performance_level(rdev, level); radeon_set_dpm_forced_performance_level()
550 mutex_unlock(&rdev->pm.mutex); radeon_set_dpm_forced_performance_level()
559 struct radeon_device *rdev = dev_get_drvdata(dev); radeon_hwmon_get_pwm1_enable() local
562 if (rdev->asic->dpm.fan_ctrl_get_mode) radeon_hwmon_get_pwm1_enable()
563 pwm_mode = rdev->asic->dpm.fan_ctrl_get_mode(rdev); radeon_hwmon_get_pwm1_enable()
574 struct radeon_device *rdev = dev_get_drvdata(dev); radeon_hwmon_set_pwm1_enable() local
578 if(!rdev->asic->dpm.fan_ctrl_set_mode) radeon_hwmon_set_pwm1_enable()
587 rdev->asic->dpm.fan_ctrl_set_mode(rdev, FDO_PWM_MODE_STATIC); radeon_hwmon_set_pwm1_enable()
590 rdev->asic->dpm.fan_ctrl_set_mode(rdev, 0); radeon_hwmon_set_pwm1_enable()
615 struct radeon_device *rdev = dev_get_drvdata(dev); radeon_hwmon_set_pwm1() local
625 err = rdev->asic->dpm.set_fan_speed_percent(rdev, value); radeon_hwmon_set_pwm1()
636 struct radeon_device *rdev = dev_get_drvdata(dev); radeon_hwmon_get_pwm1() local
640 err = rdev->asic->dpm.get_fan_speed_percent(rdev, &speed); radeon_hwmon_get_pwm1()
660 struct radeon_device *rdev = dev_get_drvdata(dev); radeon_hwmon_show_temp() local
661 struct drm_device *ddev = rdev->ddev; radeon_hwmon_show_temp()
665 if ((rdev->flags & RADEON_IS_PX) && radeon_hwmon_show_temp()
669 if (rdev->asic->pm.get_temperature) radeon_hwmon_show_temp()
670 temp = radeon_get_temperature(rdev); radeon_hwmon_show_temp()
681 struct radeon_device *rdev = dev_get_drvdata(dev); radeon_hwmon_show_temp_thresh() local
686 temp = rdev->pm.dpm.thermal.min_temp; radeon_hwmon_show_temp_thresh()
688 temp = rdev->pm.dpm.thermal.max_temp; radeon_hwmon_show_temp_thresh()
717 struct radeon_device *rdev = dev_get_drvdata(dev); hwmon_attributes_visible() local
721 if (rdev->pm.pm_method != PM_METHOD_DPM && hwmon_attributes_visible()
731 if (rdev->pm.no_fan && hwmon_attributes_visible()
739 if ((!rdev->asic->dpm.get_fan_speed_percent && hwmon_attributes_visible()
741 (!rdev->asic->dpm.fan_ctrl_get_mode && hwmon_attributes_visible()
745 if ((!rdev->asic->dpm.set_fan_speed_percent && hwmon_attributes_visible()
747 (!rdev->asic->dpm.fan_ctrl_set_mode && hwmon_attributes_visible()
752 if ((!rdev->asic->dpm.set_fan_speed_percent && hwmon_attributes_visible()
753 !rdev->asic->dpm.get_fan_speed_percent) && hwmon_attributes_visible()
771 static int radeon_hwmon_init(struct radeon_device *rdev) radeon_hwmon_init() argument
775 switch (rdev->pm.int_thermal_type) { radeon_hwmon_init()
784 if (rdev->asic->pm.get_temperature == NULL) radeon_hwmon_init()
786 rdev->pm.int_hwmon_dev = hwmon_device_register_with_groups(rdev->dev, radeon_hwmon_init()
787 "radeon", rdev, radeon_hwmon_init()
789 if (IS_ERR(rdev->pm.int_hwmon_dev)) { radeon_hwmon_init()
790 err = PTR_ERR(rdev->pm.int_hwmon_dev); radeon_hwmon_init()
791 dev_err(rdev->dev, radeon_hwmon_init()
802 static void radeon_hwmon_fini(struct radeon_device *rdev) radeon_hwmon_fini() argument
804 if (rdev->pm.int_hwmon_dev) radeon_hwmon_fini()
805 hwmon_device_unregister(rdev->pm.int_hwmon_dev); radeon_hwmon_fini()
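radeon_hwmon_init()/radeon_hwmon_fini() above pair hwmon_device_register_with_groups() with hwmon_device_unregister(), storing the returned device and validating it with IS_ERR()/PTR_ERR(). A standalone model of that register/err-check/unregister flow; fake_hwmon_register() is a hypothetical stand-in, and the ERR_PTR helpers are minimal userspace re-creations of the kernel macros:

/* Standalone model of the register/IS_ERR/unregister flow in
 * radeon_hwmon_init()/radeon_hwmon_fini(). */
#include <errno.h>
#include <stdint.h>
#include <stdio.h>

/* Minimal userspace versions of the kernel's ERR_PTR helpers. */
static void *ERR_PTR(long err) { return (void *)(intptr_t)err; }
static long PTR_ERR(void *p)   { return (long)(intptr_t)p; }
static int  IS_ERR(void *p)    { return (uintptr_t)p >= (uintptr_t)-4095; }

static void *fake_hwmon_register(int should_fail)
{
        static int dummy_dev;   /* stands in for the hwmon device */

        return should_fail ? ERR_PTR(-ENOMEM) : (void *)&dummy_dev;
}

int main(void)
{
        void *hwmon = fake_hwmon_register(1);

        if (IS_ERR(hwmon)) {
                printf("hwmon register failed: %ld\n", PTR_ERR(hwmon));
                hwmon = NULL;
        }
        if (hwmon)
                puts("would hwmon_device_unregister() here");
        return 0;
}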
810 struct radeon_device *rdev = radeon_dpm_thermal_work_handler() local
816 if (!rdev->pm.dpm_enabled) radeon_dpm_thermal_work_handler()
819 if (rdev->asic->pm.get_temperature) { radeon_dpm_thermal_work_handler()
820 int temp = radeon_get_temperature(rdev); radeon_dpm_thermal_work_handler()
822 if (temp < rdev->pm.dpm.thermal.min_temp) radeon_dpm_thermal_work_handler()
824 dpm_state = rdev->pm.dpm.user_state; radeon_dpm_thermal_work_handler()
826 if (rdev->pm.dpm.thermal.high_to_low) radeon_dpm_thermal_work_handler()
828 dpm_state = rdev->pm.dpm.user_state; radeon_dpm_thermal_work_handler()
830 mutex_lock(&rdev->pm.mutex); radeon_dpm_thermal_work_handler()
832 rdev->pm.dpm.thermal_active = true; radeon_dpm_thermal_work_handler()
834 rdev->pm.dpm.thermal_active = false; radeon_dpm_thermal_work_handler()
835 rdev->pm.dpm.state = dpm_state; radeon_dpm_thermal_work_handler()
836 mutex_unlock(&rdev->pm.mutex); radeon_dpm_thermal_work_handler()
838 radeon_pm_compute_clocks(rdev); radeon_dpm_thermal_work_handler()
841 static bool radeon_dpm_single_display(struct radeon_device *rdev) radeon_dpm_single_display() argument
843 bool single_display = (rdev->pm.dpm.new_active_crtc_count < 2) ? radeon_dpm_single_display()
847 if (single_display && rdev->asic->dpm.vblank_too_short) { radeon_dpm_single_display()
848 if (radeon_dpm_vblank_too_short(rdev)) radeon_dpm_single_display()
855 if (single_display && (r600_dpm_get_vrefresh(rdev) >= 120)) radeon_dpm_single_display()
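radeon_dpm_single_display() above starts from "fewer than two active CRTCs means single display" and then revokes that when the per-ASIC vblank_too_short hook fires or the refresh rate is 120 Hz or higher, since those cases leave too little vblank time to reclock memory safely. The same decision as a standalone predicate with plain parameters:

/* Standalone version of the radeon_dpm_single_display() decision;
 * inputs are plain parameters instead of rdev fields. */
#include <stdbool.h>
#include <stdio.h>

static bool dpm_single_display(int active_crtc_count, bool vblank_too_short,
                               int vrefresh_hz)
{
        bool single = active_crtc_count < 2;

        /* Not enough vblank time to retrain memory: treat as multi-head
         * so mclk stays high. */
        if (single && vblank_too_short)
                single = false;
        /* 120 Hz+ panels also leave too short a vblank. */
        if (single && vrefresh_hz >= 120)
                single = false;
        return single;
}

int main(void)
{
        printf("%d\n", dpm_single_display(1, false, 60));   /* 1 */
        printf("%d\n", dpm_single_display(1, false, 144));  /* 0 */
        return 0;
}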
861 static struct radeon_ps *radeon_dpm_pick_power_state(struct radeon_device *rdev, radeon_dpm_pick_power_state() argument
867 bool single_display = radeon_dpm_single_display(rdev); radeon_dpm_pick_power_state()
880 for (i = 0; i < rdev->pm.dpm.num_ps; i++) { radeon_dpm_pick_power_state()
881 ps = &rdev->pm.dpm.ps[i]; radeon_dpm_pick_power_state()
914 if (rdev->pm.dpm.uvd_ps) radeon_dpm_pick_power_state()
915 return rdev->pm.dpm.uvd_ps; radeon_dpm_pick_power_state()
935 return rdev->pm.dpm.boot_ps; radeon_dpm_pick_power_state()
964 if (rdev->pm.dpm.uvd_ps) { radeon_dpm_pick_power_state()
965 return rdev->pm.dpm.uvd_ps; radeon_dpm_pick_power_state()
988 static void radeon_dpm_change_power_state_locked(struct radeon_device *rdev) radeon_dpm_change_power_state_locked() argument
994 bool single_display = radeon_dpm_single_display(rdev); radeon_dpm_change_power_state_locked()
997 if (!rdev->pm.dpm_enabled) radeon_dpm_change_power_state_locked()
1000 if (rdev->pm.dpm.user_state != rdev->pm.dpm.state) { radeon_dpm_change_power_state_locked()
1002 if ((!rdev->pm.dpm.thermal_active) && radeon_dpm_change_power_state_locked()
1003 (!rdev->pm.dpm.uvd_active)) radeon_dpm_change_power_state_locked()
1004 rdev->pm.dpm.state = rdev->pm.dpm.user_state; radeon_dpm_change_power_state_locked()
1006 dpm_state = rdev->pm.dpm.state; radeon_dpm_change_power_state_locked()
1008 ps = radeon_dpm_pick_power_state(rdev, dpm_state); radeon_dpm_change_power_state_locked()
1010 rdev->pm.dpm.requested_ps = ps; radeon_dpm_change_power_state_locked()
1015 if (rdev->pm.dpm.current_ps == rdev->pm.dpm.requested_ps) { radeon_dpm_change_power_state_locked()
1017 if (ps->vce_active != rdev->pm.dpm.vce_active) radeon_dpm_change_power_state_locked()
1020 if (rdev->pm.dpm.single_display != single_display) radeon_dpm_change_power_state_locked()
1022 if ((rdev->family < CHIP_BARTS) || (rdev->flags & RADEON_IS_IGP)) { radeon_dpm_change_power_state_locked()
1026 if (rdev->pm.dpm.new_active_crtcs != rdev->pm.dpm.current_active_crtcs) { radeon_dpm_change_power_state_locked()
1028 radeon_bandwidth_update(rdev); radeon_dpm_change_power_state_locked()
1030 radeon_dpm_display_configuration_changed(rdev); radeon_dpm_change_power_state_locked()
1031 rdev->pm.dpm.current_active_crtcs = rdev->pm.dpm.new_active_crtcs; radeon_dpm_change_power_state_locked()
1032 rdev->pm.dpm.current_active_crtc_count = rdev->pm.dpm.new_active_crtc_count; radeon_dpm_change_power_state_locked()
1040 if (rdev->pm.dpm.new_active_crtcs == radeon_dpm_change_power_state_locked()
1041 rdev->pm.dpm.current_active_crtcs) { radeon_dpm_change_power_state_locked()
1044 if ((rdev->pm.dpm.current_active_crtc_count > 1) && radeon_dpm_change_power_state_locked()
1045 (rdev->pm.dpm.new_active_crtc_count > 1)) { radeon_dpm_change_power_state_locked()
1047 radeon_bandwidth_update(rdev); radeon_dpm_change_power_state_locked()
1049 radeon_dpm_display_configuration_changed(rdev); radeon_dpm_change_power_state_locked()
1050 rdev->pm.dpm.current_active_crtcs = rdev->pm.dpm.new_active_crtcs; radeon_dpm_change_power_state_locked()
1051 rdev->pm.dpm.current_active_crtc_count = rdev->pm.dpm.new_active_crtc_count; radeon_dpm_change_power_state_locked()
1061 radeon_dpm_print_power_state(rdev, rdev->pm.dpm.current_ps); radeon_dpm_change_power_state_locked()
1063 radeon_dpm_print_power_state(rdev, rdev->pm.dpm.requested_ps); radeon_dpm_change_power_state_locked()
1066 down_write(&rdev->pm.mclk_lock); radeon_dpm_change_power_state_locked()
1067 mutex_lock(&rdev->ring_lock); radeon_dpm_change_power_state_locked()
1070 ps->vce_active = rdev->pm.dpm.vce_active; radeon_dpm_change_power_state_locked()
1072 ret = radeon_dpm_pre_set_power_state(rdev); radeon_dpm_change_power_state_locked()
1077 radeon_bandwidth_update(rdev); radeon_dpm_change_power_state_locked()
1079 radeon_dpm_display_configuration_changed(rdev); radeon_dpm_change_power_state_locked()
1083 struct radeon_ring *ring = &rdev->ring[i]; radeon_dpm_change_power_state_locked()
1085 radeon_fence_wait_empty(rdev, i); radeon_dpm_change_power_state_locked()
1089 radeon_dpm_set_power_state(rdev); radeon_dpm_change_power_state_locked()
1092 rdev->pm.dpm.current_ps = rdev->pm.dpm.requested_ps; radeon_dpm_change_power_state_locked()
1094 radeon_dpm_post_set_power_state(rdev); radeon_dpm_change_power_state_locked()
1096 rdev->pm.dpm.current_active_crtcs = rdev->pm.dpm.new_active_crtcs; radeon_dpm_change_power_state_locked()
1097 rdev->pm.dpm.current_active_crtc_count = rdev->pm.dpm.new_active_crtc_count; radeon_dpm_change_power_state_locked()
1098 rdev->pm.dpm.single_display = single_display; radeon_dpm_change_power_state_locked()
1100 if (rdev->asic->dpm.force_performance_level) { radeon_dpm_change_power_state_locked()
1101 if (rdev->pm.dpm.thermal_active) { radeon_dpm_change_power_state_locked()
1102 enum radeon_dpm_forced_level level = rdev->pm.dpm.forced_level; radeon_dpm_change_power_state_locked()
1104 radeon_dpm_force_performance_level(rdev, RADEON_DPM_FORCED_LEVEL_LOW); radeon_dpm_change_power_state_locked()
1106 rdev->pm.dpm.forced_level = level; radeon_dpm_change_power_state_locked()
1109 radeon_dpm_force_performance_level(rdev, rdev->pm.dpm.forced_level); radeon_dpm_change_power_state_locked()
1114 mutex_unlock(&rdev->ring_lock); radeon_dpm_change_power_state_locked()
1115 up_write(&rdev->pm.mclk_lock); radeon_dpm_change_power_state_locked()
1118 void radeon_dpm_enable_uvd(struct radeon_device *rdev, bool enable) radeon_dpm_enable_uvd() argument
1122 if (rdev->asic->dpm.powergate_uvd) { radeon_dpm_enable_uvd()
1123 mutex_lock(&rdev->pm.mutex); radeon_dpm_enable_uvd()
1126 enable |= rdev->pm.dpm.sd > 0; radeon_dpm_enable_uvd()
1127 enable |= rdev->pm.dpm.hd > 0; radeon_dpm_enable_uvd()
1129 radeon_dpm_powergate_uvd(rdev, !enable); radeon_dpm_enable_uvd()
1130 mutex_unlock(&rdev->pm.mutex); radeon_dpm_enable_uvd()
1133 mutex_lock(&rdev->pm.mutex); radeon_dpm_enable_uvd()
1134 rdev->pm.dpm.uvd_active = true; radeon_dpm_enable_uvd()
1137 if ((rdev->pm.dpm.sd == 1) && (rdev->pm.dpm.hd == 0)) radeon_dpm_enable_uvd()
1139 else if ((rdev->pm.dpm.sd == 2) && (rdev->pm.dpm.hd == 0)) radeon_dpm_enable_uvd()
1141 else if ((rdev->pm.dpm.sd == 0) && (rdev->pm.dpm.hd == 1)) radeon_dpm_enable_uvd()
1143 else if ((rdev->pm.dpm.sd == 0) && (rdev->pm.dpm.hd == 2)) radeon_dpm_enable_uvd()
1148 rdev->pm.dpm.state = dpm_state; radeon_dpm_enable_uvd()
1149 mutex_unlock(&rdev->pm.mutex); radeon_dpm_enable_uvd()
1151 mutex_lock(&rdev->pm.mutex); radeon_dpm_enable_uvd()
1152 rdev->pm.dpm.uvd_active = false; radeon_dpm_enable_uvd()
1153 mutex_unlock(&rdev->pm.mutex); radeon_dpm_enable_uvd()
1156 radeon_pm_compute_clocks(rdev); radeon_dpm_enable_uvd()
1160 void radeon_dpm_enable_vce(struct radeon_device *rdev, bool enable) radeon_dpm_enable_vce() argument
1163 mutex_lock(&rdev->pm.mutex); radeon_dpm_enable_vce()
1164 rdev->pm.dpm.vce_active = true; radeon_dpm_enable_vce()
1166 rdev->pm.dpm.vce_level = RADEON_VCE_LEVEL_AC_ALL; radeon_dpm_enable_vce()
1167 mutex_unlock(&rdev->pm.mutex); radeon_dpm_enable_vce()
1169 mutex_lock(&rdev->pm.mutex); radeon_dpm_enable_vce()
1170 rdev->pm.dpm.vce_active = false; radeon_dpm_enable_vce()
1171 mutex_unlock(&rdev->pm.mutex); radeon_dpm_enable_vce()
1174 radeon_pm_compute_clocks(rdev); radeon_dpm_enable_vce()
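radeon_dpm_enable_uvd() and radeon_dpm_enable_vce() above share one shape: take pm.mutex, flip the *_active flag (UVD also derives a dpm_state from the sd/hd stream counts), drop the mutex, then call radeon_pm_compute_clocks() outside the lock, because the state-change path acquires pm.mutex again. A standalone sketch of that flag-under-mutex, recompute-outside pattern:

/* Standalone sketch of the lock/flag/unlock/recompute pattern in
 * radeon_dpm_enable_uvd()/radeon_dpm_enable_vce(); build with -lpthread. */
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

static pthread_mutex_t pm_mutex = PTHREAD_MUTEX_INITIALIZER;
static bool vce_active;

static void pm_compute_clocks(void)
{
        /* Would re-evaluate the power state; must run without pm_mutex,
         * since the state-change path takes that mutex itself. */
        printf("recompute with vce_active=%d\n", vce_active);
}

static void dpm_enable_vce(bool enable)
{
        pthread_mutex_lock(&pm_mutex);
        vce_active = enable;            /* flag change is the locked part */
        pthread_mutex_unlock(&pm_mutex);

        pm_compute_clocks();            /* reclock outside the mutex */
}

int main(void)
{
        dpm_enable_vce(true);
        dpm_enable_vce(false);
        return 0;
}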
1177 static void radeon_pm_suspend_old(struct radeon_device *rdev) radeon_pm_suspend_old() argument
1179 mutex_lock(&rdev->pm.mutex); radeon_pm_suspend_old()
1180 if (rdev->pm.pm_method == PM_METHOD_DYNPM) { radeon_pm_suspend_old()
1181 if (rdev->pm.dynpm_state == DYNPM_STATE_ACTIVE) radeon_pm_suspend_old()
1182 rdev->pm.dynpm_state = DYNPM_STATE_SUSPENDED; radeon_pm_suspend_old()
1184 mutex_unlock(&rdev->pm.mutex); radeon_pm_suspend_old()
1186 cancel_delayed_work_sync(&rdev->pm.dynpm_idle_work); radeon_pm_suspend_old()
1189 static void radeon_pm_suspend_dpm(struct radeon_device *rdev) radeon_pm_suspend_dpm() argument
1191 mutex_lock(&rdev->pm.mutex); radeon_pm_suspend_dpm()
1193 radeon_dpm_disable(rdev); radeon_pm_suspend_dpm()
1195 rdev->pm.dpm.current_ps = rdev->pm.dpm.requested_ps = rdev->pm.dpm.boot_ps; radeon_pm_suspend_dpm()
1196 rdev->pm.dpm_enabled = false; radeon_pm_suspend_dpm()
1197 mutex_unlock(&rdev->pm.mutex); radeon_pm_suspend_dpm()
1200 void radeon_pm_suspend(struct radeon_device *rdev) radeon_pm_suspend() argument
1202 if (rdev->pm.pm_method == PM_METHOD_DPM) radeon_pm_suspend()
1203 radeon_pm_suspend_dpm(rdev); radeon_pm_suspend()
1205 radeon_pm_suspend_old(rdev); radeon_pm_suspend()
1208 static void radeon_pm_resume_old(struct radeon_device *rdev) radeon_pm_resume_old() argument
1211 if ((rdev->family >= CHIP_BARTS) && radeon_pm_resume_old()
1212 (rdev->family <= CHIP_CAYMAN) && radeon_pm_resume_old()
1213 rdev->mc_fw) { radeon_pm_resume_old()
1214 if (rdev->pm.default_vddc) radeon_pm_resume_old()
1215 radeon_atom_set_voltage(rdev, rdev->pm.default_vddc, radeon_pm_resume_old()
1217 if (rdev->pm.default_vddci) radeon_pm_resume_old()
1218 radeon_atom_set_voltage(rdev, rdev->pm.default_vddci, radeon_pm_resume_old()
1220 if (rdev->pm.default_sclk) radeon_pm_resume_old()
1221 radeon_set_engine_clock(rdev, rdev->pm.default_sclk); radeon_pm_resume_old()
1222 if (rdev->pm.default_mclk) radeon_pm_resume_old()
1223 radeon_set_memory_clock(rdev, rdev->pm.default_mclk); radeon_pm_resume_old()
1226 mutex_lock(&rdev->pm.mutex); radeon_pm_resume_old()
1227 rdev->pm.current_power_state_index = rdev->pm.default_power_state_index; radeon_pm_resume_old()
1228 rdev->pm.current_clock_mode_index = 0; radeon_pm_resume_old()
1229 rdev->pm.current_sclk = rdev->pm.default_sclk; radeon_pm_resume_old()
1230 rdev->pm.current_mclk = rdev->pm.default_mclk; radeon_pm_resume_old()
1231 if (rdev->pm.power_state) { radeon_pm_resume_old()
1232 rdev->pm.current_vddc = rdev->pm.power_state[rdev->pm.default_power_state_index].clock_info[0].voltage.voltage; radeon_pm_resume_old()
1233 rdev->pm.current_vddci = rdev->pm.power_state[rdev->pm.default_power_state_index].clock_info[0].voltage.vddci; radeon_pm_resume_old()
1235 if (rdev->pm.pm_method == PM_METHOD_DYNPM radeon_pm_resume_old()
1236 && rdev->pm.dynpm_state == DYNPM_STATE_SUSPENDED) { radeon_pm_resume_old()
1237 rdev->pm.dynpm_state = DYNPM_STATE_ACTIVE; radeon_pm_resume_old()
1238 schedule_delayed_work(&rdev->pm.dynpm_idle_work, radeon_pm_resume_old()
1241 mutex_unlock(&rdev->pm.mutex); radeon_pm_resume_old()
1242 radeon_pm_compute_clocks(rdev); radeon_pm_resume_old()
1245 static void radeon_pm_resume_dpm(struct radeon_device *rdev) radeon_pm_resume_dpm() argument
1250 mutex_lock(&rdev->pm.mutex); radeon_pm_resume_dpm()
1251 rdev->pm.dpm.current_ps = rdev->pm.dpm.requested_ps = rdev->pm.dpm.boot_ps; radeon_pm_resume_dpm()
1252 radeon_dpm_setup_asic(rdev); radeon_pm_resume_dpm()
1253 ret = radeon_dpm_enable(rdev); radeon_pm_resume_dpm()
1254 mutex_unlock(&rdev->pm.mutex); radeon_pm_resume_dpm()
1257 rdev->pm.dpm_enabled = true; radeon_pm_resume_dpm()
1262 if ((rdev->family >= CHIP_BARTS) && radeon_pm_resume_dpm()
1263 (rdev->family <= CHIP_CAYMAN) && radeon_pm_resume_dpm()
1264 rdev->mc_fw) { radeon_pm_resume_dpm()
1265 if (rdev->pm.default_vddc) radeon_pm_resume_dpm()
1266 radeon_atom_set_voltage(rdev, rdev->pm.default_vddc, radeon_pm_resume_dpm()
1268 if (rdev->pm.default_vddci) radeon_pm_resume_dpm()
1269 radeon_atom_set_voltage(rdev, rdev->pm.default_vddci, radeon_pm_resume_dpm()
1271 if (rdev->pm.default_sclk) radeon_pm_resume_dpm()
1272 radeon_set_engine_clock(rdev, rdev->pm.default_sclk); radeon_pm_resume_dpm()
1273 if (rdev->pm.default_mclk) radeon_pm_resume_dpm()
1274 radeon_set_memory_clock(rdev, rdev->pm.default_mclk); radeon_pm_resume_dpm()
1278 void radeon_pm_resume(struct radeon_device *rdev) radeon_pm_resume() argument
1280 if (rdev->pm.pm_method == PM_METHOD_DPM) radeon_pm_resume()
1281 radeon_pm_resume_dpm(rdev); radeon_pm_resume()
1283 radeon_pm_resume_old(rdev); radeon_pm_resume()
1286 static int radeon_pm_init_old(struct radeon_device *rdev) radeon_pm_init_old() argument
1290 rdev->pm.profile = PM_PROFILE_DEFAULT; radeon_pm_init_old()
1291 rdev->pm.dynpm_state = DYNPM_STATE_DISABLED; radeon_pm_init_old()
1292 rdev->pm.dynpm_planned_action = DYNPM_ACTION_NONE; radeon_pm_init_old()
1293 rdev->pm.dynpm_can_upclock = true; radeon_pm_init_old()
1294 rdev->pm.dynpm_can_downclock = true; radeon_pm_init_old()
1295 rdev->pm.default_sclk = rdev->clock.default_sclk; radeon_pm_init_old()
1296 rdev->pm.default_mclk = rdev->clock.default_mclk; radeon_pm_init_old()
1297 rdev->pm.current_sclk = rdev->clock.default_sclk; radeon_pm_init_old()
1298 rdev->pm.current_mclk = rdev->clock.default_mclk; radeon_pm_init_old()
1299 rdev->pm.int_thermal_type = THERMAL_TYPE_NONE; radeon_pm_init_old()
1301 if (rdev->bios) { radeon_pm_init_old()
1302 if (rdev->is_atom_bios) radeon_pm_init_old()
1303 radeon_atombios_get_power_modes(rdev); radeon_pm_init_old()
1305 radeon_combios_get_power_modes(rdev); radeon_pm_init_old()
1306 radeon_pm_print_states(rdev); radeon_pm_init_old()
1307 radeon_pm_init_profile(rdev); radeon_pm_init_old()
1309 if ((rdev->family >= CHIP_BARTS) && radeon_pm_init_old()
1310 (rdev->family <= CHIP_CAYMAN) && radeon_pm_init_old()
1311 rdev->mc_fw) { radeon_pm_init_old()
1312 if (rdev->pm.default_vddc) radeon_pm_init_old()
1313 radeon_atom_set_voltage(rdev, rdev->pm.default_vddc, radeon_pm_init_old()
1315 if (rdev->pm.default_vddci) radeon_pm_init_old()
1316 radeon_atom_set_voltage(rdev, rdev->pm.default_vddci, radeon_pm_init_old()
1318 if (rdev->pm.default_sclk) radeon_pm_init_old()
1319 radeon_set_engine_clock(rdev, rdev->pm.default_sclk); radeon_pm_init_old()
1320 if (rdev->pm.default_mclk) radeon_pm_init_old()
1321 radeon_set_memory_clock(rdev, rdev->pm.default_mclk); radeon_pm_init_old()
1326 ret = radeon_hwmon_init(rdev); radeon_pm_init_old()
1330 INIT_DELAYED_WORK(&rdev->pm.dynpm_idle_work, radeon_dynpm_idle_work_handler); radeon_pm_init_old()
1332 if (rdev->pm.num_power_states > 1) { radeon_pm_init_old()
1333 if (radeon_debugfs_pm_init(rdev)) { radeon_pm_init_old()
1343 static void radeon_dpm_print_power_states(struct radeon_device *rdev) radeon_dpm_print_power_states() argument
1347 for (i = 0; i < rdev->pm.dpm.num_ps; i++) { radeon_dpm_print_power_states()
1349 radeon_dpm_print_power_state(rdev, &rdev->pm.dpm.ps[i]); radeon_dpm_print_power_states()
1353 static int radeon_pm_init_dpm(struct radeon_device *rdev) radeon_pm_init_dpm() argument
1358 rdev->pm.dpm.state = POWER_STATE_TYPE_BALANCED; radeon_pm_init_dpm()
1359 rdev->pm.dpm.user_state = POWER_STATE_TYPE_BALANCED; radeon_pm_init_dpm()
1360 rdev->pm.dpm.forced_level = RADEON_DPM_FORCED_LEVEL_AUTO; radeon_pm_init_dpm()
1361 rdev->pm.default_sclk = rdev->clock.default_sclk; radeon_pm_init_dpm()
1362 rdev->pm.default_mclk = rdev->clock.default_mclk; radeon_pm_init_dpm()
1363 rdev->pm.current_sclk = rdev->clock.default_sclk; radeon_pm_init_dpm()
1364 rdev->pm.current_mclk = rdev->clock.default_mclk; radeon_pm_init_dpm()
1365 rdev->pm.int_thermal_type = THERMAL_TYPE_NONE; radeon_pm_init_dpm()
1367 if (rdev->bios && rdev->is_atom_bios) radeon_pm_init_dpm()
1368 radeon_atombios_get_power_modes(rdev); radeon_pm_init_dpm()
1373 ret = radeon_hwmon_init(rdev); radeon_pm_init_dpm()
1377 INIT_WORK(&rdev->pm.dpm.thermal.work, radeon_dpm_thermal_work_handler); radeon_pm_init_dpm()
1378 mutex_lock(&rdev->pm.mutex); radeon_pm_init_dpm()
1379 radeon_dpm_init(rdev); radeon_pm_init_dpm()
1380 rdev->pm.dpm.current_ps = rdev->pm.dpm.requested_ps = rdev->pm.dpm.boot_ps; radeon_pm_init_dpm()
1382 radeon_dpm_print_power_states(rdev); radeon_pm_init_dpm()
1383 radeon_dpm_setup_asic(rdev); radeon_pm_init_dpm()
1384 ret = radeon_dpm_enable(rdev); radeon_pm_init_dpm()
1385 mutex_unlock(&rdev->pm.mutex); radeon_pm_init_dpm()
1388 rdev->pm.dpm_enabled = true; radeon_pm_init_dpm()
1390 if (radeon_debugfs_pm_init(rdev)) { radeon_pm_init_dpm()
1399 rdev->pm.dpm_enabled = false; radeon_pm_init_dpm()
1400 if ((rdev->family >= CHIP_BARTS) && radeon_pm_init_dpm()
1401 (rdev->family <= CHIP_CAYMAN) && radeon_pm_init_dpm()
1402 rdev->mc_fw) { radeon_pm_init_dpm()
1403 if (rdev->pm.default_vddc) radeon_pm_init_dpm()
1404 radeon_atom_set_voltage(rdev, rdev->pm.default_vddc, radeon_pm_init_dpm()
1406 if (rdev->pm.default_vddci) radeon_pm_init_dpm()
1407 radeon_atom_set_voltage(rdev, rdev->pm.default_vddci, radeon_pm_init_dpm()
1409 if (rdev->pm.default_sclk) radeon_pm_init_dpm()
1410 radeon_set_engine_clock(rdev, rdev->pm.default_sclk); radeon_pm_init_dpm()
1411 if (rdev->pm.default_mclk) radeon_pm_init_dpm()
1412 radeon_set_memory_clock(rdev, rdev->pm.default_mclk); radeon_pm_init_dpm()
1434 int radeon_pm_init(struct radeon_device *rdev) radeon_pm_init() argument
1441 if (rdev->pdev->vendor == p->chip_vendor && radeon_pm_init()
1442 rdev->pdev->device == p->chip_device && radeon_pm_init()
1443 rdev->pdev->subsystem_vendor == p->subsys_vendor && radeon_pm_init()
1444 rdev->pdev->subsystem_device == p->subsys_device) { radeon_pm_init()
1452 switch (rdev->family) { radeon_pm_init()
1462 if (!rdev->rlc_fw) radeon_pm_init()
1463 rdev->pm.pm_method = PM_METHOD_PROFILE; radeon_pm_init()
1464 else if ((rdev->family >= CHIP_RV770) && radeon_pm_init()
1465 (!(rdev->flags & RADEON_IS_IGP)) && radeon_pm_init()
1466 (!rdev->smc_fw)) radeon_pm_init()
1467 rdev->pm.pm_method = PM_METHOD_PROFILE; radeon_pm_init()
1469 rdev->pm.pm_method = PM_METHOD_DPM; radeon_pm_init()
1471 rdev->pm.pm_method = PM_METHOD_PROFILE; radeon_pm_init()
1500 if (!rdev->rlc_fw) radeon_pm_init()
1501 rdev->pm.pm_method = PM_METHOD_PROFILE; radeon_pm_init()
1502 else if ((rdev->family >= CHIP_RV770) && radeon_pm_init()
1503 (!(rdev->flags & RADEON_IS_IGP)) && radeon_pm_init()
1504 (!rdev->smc_fw)) radeon_pm_init()
1505 rdev->pm.pm_method = PM_METHOD_PROFILE; radeon_pm_init()
1507 rdev->pm.pm_method = PM_METHOD_PROFILE; radeon_pm_init()
1509 rdev->pm.pm_method = PM_METHOD_PROFILE; radeon_pm_init()
1511 rdev->pm.pm_method = PM_METHOD_DPM; radeon_pm_init()
1515 rdev->pm.pm_method = PM_METHOD_PROFILE; radeon_pm_init()
1519 if (rdev->pm.pm_method == PM_METHOD_DPM) radeon_pm_init()
1520 return radeon_pm_init_dpm(rdev); radeon_pm_init()
1522 return radeon_pm_init_old(rdev); radeon_pm_init()
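The method selection above degrades gracefully: DPM-capable families still fall back to the old profile code when required microcode is absent, either no RLC ucode at all, or no SMC ucode on discrete chips from RV770 on. A hedged condensation of those checks; the real function also consults a quirk table matched by PCI IDs (visible near source line 1441) and, in the full driver, a radeon_dpm module parameter:

    #include <stdbool.h>

    /* Hypothetical condensation of the firmware checks in radeon_pm_init();
     * the full driver also honors module parameters and per-board quirks. */
    enum pm_method { PM_METHOD_PROFILE, PM_METHOD_DPM };

    enum pm_method choose_pm_method(bool family_supports_dpm, bool has_rlc_fw,
                                    bool is_dgpu_rv770_plus, bool has_smc_fw)
    {
        if (!family_supports_dpm || !has_rlc_fw)
            return PM_METHOD_PROFILE;    /* no RLC ucode: DPM cannot run */
        if (is_dgpu_rv770_plus && !has_smc_fw)
            return PM_METHOD_PROFILE;    /* dGPU without SMC ucode */
        return PM_METHOD_DPM;
    }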
1525 int radeon_pm_late_init(struct radeon_device *rdev) radeon_pm_late_init() argument
1529 if (rdev->pm.pm_method == PM_METHOD_DPM) { radeon_pm_late_init()
1530 if (rdev->pm.dpm_enabled) { radeon_pm_late_init()
1531 if (!rdev->pm.sysfs_initialized) { radeon_pm_late_init()
1532 ret = device_create_file(rdev->dev, &dev_attr_power_dpm_state); radeon_pm_late_init()
1535 ret = device_create_file(rdev->dev, &dev_attr_power_dpm_force_performance_level); radeon_pm_late_init()
1539 ret = device_create_file(rdev->dev, &dev_attr_power_profile); radeon_pm_late_init()
1542 ret = device_create_file(rdev->dev, &dev_attr_power_method); radeon_pm_late_init()
1545 rdev->pm.sysfs_initialized = true; radeon_pm_late_init()
1548 mutex_lock(&rdev->pm.mutex); radeon_pm_late_init()
1549 ret = radeon_dpm_late_enable(rdev); radeon_pm_late_init()
1550 mutex_unlock(&rdev->pm.mutex); radeon_pm_late_init()
1552 rdev->pm.dpm_enabled = false; radeon_pm_late_init()
1558 radeon_pm_compute_clocks(rdev); radeon_pm_late_init()
1562 if ((rdev->pm.num_power_states > 1) && radeon_pm_late_init()
1563 (!rdev->pm.sysfs_initialized)) { radeon_pm_late_init()
1565 ret = device_create_file(rdev->dev, &dev_attr_power_profile); radeon_pm_late_init()
1568 ret = device_create_file(rdev->dev, &dev_attr_power_method); radeon_pm_late_init()
1572 rdev->pm.sysfs_initialized = true; radeon_pm_late_init()
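Sysfs attribute creation in late init is deliberately one-shot and non-fatal: the pm.sysfs_initialized flag prevents re-creation across init cycles, and a failed device_create_file() only logs an error. A sketch of that guard, reusing the attribute names visible above (in the real file they are declared with DEVICE_ATTR()):

    #include <linux/device.h>

    /* dev_attr_power_profile / dev_attr_power_method are assumed to be
     * declared elsewhere with DEVICE_ATTR(), as in radeon_pm.c. */
    extern struct device_attribute dev_attr_power_profile;
    extern struct device_attribute dev_attr_power_method;

    static bool sysfs_initialized;

    void pm_sysfs_init_once(struct device *dev)
    {
        if (sysfs_initialized)
            return;                 /* already created on a previous init */
        if (device_create_file(dev, &dev_attr_power_profile))
            dev_err(dev, "failed to create power_profile\n");
        if (device_create_file(dev, &dev_attr_power_method))
            dev_err(dev, "failed to create power_method\n");
        sysfs_initialized = true;
    }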
1578 static void radeon_pm_fini_old(struct radeon_device *rdev) radeon_pm_fini_old() argument
1580 if (rdev->pm.num_power_states > 1) { radeon_pm_fini_old()
1581 mutex_lock(&rdev->pm.mutex); radeon_pm_fini_old()
1582 if (rdev->pm.pm_method == PM_METHOD_PROFILE) { radeon_pm_fini_old()
1583 rdev->pm.profile = PM_PROFILE_DEFAULT; radeon_pm_fini_old()
1584 radeon_pm_update_profile(rdev); radeon_pm_fini_old()
1585 radeon_pm_set_clocks(rdev); radeon_pm_fini_old()
1586 } else if (rdev->pm.pm_method == PM_METHOD_DYNPM) { radeon_pm_fini_old()
1588 rdev->pm.dynpm_state = DYNPM_STATE_DISABLED; radeon_pm_fini_old()
1589 rdev->pm.dynpm_planned_action = DYNPM_ACTION_DEFAULT; radeon_pm_fini_old()
1590 radeon_pm_set_clocks(rdev); radeon_pm_fini_old()
1592 mutex_unlock(&rdev->pm.mutex); radeon_pm_fini_old()
1594 cancel_delayed_work_sync(&rdev->pm.dynpm_idle_work); radeon_pm_fini_old()
1596 device_remove_file(rdev->dev, &dev_attr_power_profile); radeon_pm_fini_old()
1597 device_remove_file(rdev->dev, &dev_attr_power_method); radeon_pm_fini_old()
1600 radeon_hwmon_fini(rdev); radeon_pm_fini_old()
1601 kfree(rdev->pm.power_state); radeon_pm_fini_old()
1604 static void radeon_pm_fini_dpm(struct radeon_device *rdev) radeon_pm_fini_dpm() argument
1606 if (rdev->pm.num_power_states > 1) { radeon_pm_fini_dpm()
1607 mutex_lock(&rdev->pm.mutex); radeon_pm_fini_dpm()
1608 radeon_dpm_disable(rdev); radeon_pm_fini_dpm()
1609 mutex_unlock(&rdev->pm.mutex); radeon_pm_fini_dpm()
1611 device_remove_file(rdev->dev, &dev_attr_power_dpm_state); radeon_pm_fini_dpm()
1612 device_remove_file(rdev->dev, &dev_attr_power_dpm_force_performance_level); radeon_pm_fini_dpm()
1614 device_remove_file(rdev->dev, &dev_attr_power_profile); radeon_pm_fini_dpm()
1615 device_remove_file(rdev->dev, &dev_attr_power_method); radeon_pm_fini_dpm()
1617 radeon_dpm_fini(rdev); radeon_pm_fini_dpm()
1619 radeon_hwmon_fini(rdev); radeon_pm_fini_dpm()
1620 kfree(rdev->pm.power_state); radeon_pm_fini_dpm()
1623 void radeon_pm_fini(struct radeon_device *rdev) radeon_pm_fini() argument
1625 if (rdev->pm.pm_method == PM_METHOD_DPM) radeon_pm_fini()
1626 radeon_pm_fini_dpm(rdev); radeon_pm_fini()
1628 radeon_pm_fini_old(rdev); radeon_pm_fini()
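Both fini paths above share one teardown order: quiesce the hardware while holding pm.mutex, remove the sysfs attributes, tear down the backend and hwmon, and free the power_state table that BIOS parsing allocated at init. A sketch of that ordering with hypothetical helpers:

    #include <linux/mutex.h>
    #include <linux/slab.h>

    struct pm_ctx {
        struct mutex mutex;
        void *power_state;      /* table allocated while parsing the BIOS */
    };

    /* Hypothetical helpers standing in for radeon_dpm_disable(), the
     * device_remove_file() calls, radeon_dpm_fini(), radeon_hwmon_fini(). */
    static void pm_hw_disable(struct pm_ctx *pm) { }
    static void pm_sysfs_remove(struct pm_ctx *pm) { }
    static void pm_backend_fini(struct pm_ctx *pm) { }
    static void pm_hwmon_fini(struct pm_ctx *pm) { }

    void pm_fini(struct pm_ctx *pm)
    {
        mutex_lock(&pm->mutex);
        pm_hw_disable(pm);      /* stop reclocking before teardown */
        mutex_unlock(&pm->mutex);

        pm_sysfs_remove(pm);
        pm_backend_fini(pm);
        pm_hwmon_fini(pm);
        kfree(pm->power_state); /* last: nothing may reference it now */
        pm->power_state = NULL;
    }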
1631 static void radeon_pm_compute_clocks_old(struct radeon_device *rdev) radeon_pm_compute_clocks_old() argument
1633 struct drm_device *ddev = rdev->ddev; radeon_pm_compute_clocks_old()
1637 if (rdev->pm.num_power_states < 2) radeon_pm_compute_clocks_old()
1640 mutex_lock(&rdev->pm.mutex); radeon_pm_compute_clocks_old()
1642 rdev->pm.active_crtcs = 0; radeon_pm_compute_clocks_old()
1643 rdev->pm.active_crtc_count = 0; radeon_pm_compute_clocks_old()
1644 if (rdev->num_crtc && rdev->mode_info.mode_config_initialized) { radeon_pm_compute_clocks_old()
1649 rdev->pm.active_crtcs |= (1 << radeon_crtc->crtc_id); radeon_pm_compute_clocks_old()
1650 rdev->pm.active_crtc_count++; radeon_pm_compute_clocks_old()
1655 if (rdev->pm.pm_method == PM_METHOD_PROFILE) { radeon_pm_compute_clocks_old()
1656 radeon_pm_update_profile(rdev); radeon_pm_compute_clocks_old()
1657 radeon_pm_set_clocks(rdev); radeon_pm_compute_clocks_old()
1658 } else if (rdev->pm.pm_method == PM_METHOD_DYNPM) { radeon_pm_compute_clocks_old()
1659 if (rdev->pm.dynpm_state != DYNPM_STATE_DISABLED) { radeon_pm_compute_clocks_old()
1660 if (rdev->pm.active_crtc_count > 1) { radeon_pm_compute_clocks_old()
1661 if (rdev->pm.dynpm_state == DYNPM_STATE_ACTIVE) { radeon_pm_compute_clocks_old()
1662 cancel_delayed_work(&rdev->pm.dynpm_idle_work); radeon_pm_compute_clocks_old()
1664 rdev->pm.dynpm_state = DYNPM_STATE_PAUSED; radeon_pm_compute_clocks_old()
1665 rdev->pm.dynpm_planned_action = DYNPM_ACTION_DEFAULT; radeon_pm_compute_clocks_old()
1666 radeon_pm_get_dynpm_state(rdev); radeon_pm_compute_clocks_old()
1667 radeon_pm_set_clocks(rdev); radeon_pm_compute_clocks_old()
1671 } else if (rdev->pm.active_crtc_count == 1) { radeon_pm_compute_clocks_old()
1674 if (rdev->pm.dynpm_state == DYNPM_STATE_MINIMUM) { radeon_pm_compute_clocks_old()
1675 rdev->pm.dynpm_state = DYNPM_STATE_ACTIVE; radeon_pm_compute_clocks_old()
1676 rdev->pm.dynpm_planned_action = DYNPM_ACTION_UPCLOCK; radeon_pm_compute_clocks_old()
1677 radeon_pm_get_dynpm_state(rdev); radeon_pm_compute_clocks_old()
1678 radeon_pm_set_clocks(rdev); radeon_pm_compute_clocks_old()
1680 schedule_delayed_work(&rdev->pm.dynpm_idle_work, radeon_pm_compute_clocks_old()
1682 } else if (rdev->pm.dynpm_state == DYNPM_STATE_PAUSED) { radeon_pm_compute_clocks_old()
1683 rdev->pm.dynpm_state = DYNPM_STATE_ACTIVE; radeon_pm_compute_clocks_old()
1684 schedule_delayed_work(&rdev->pm.dynpm_idle_work, radeon_pm_compute_clocks_old()
1689 if (rdev->pm.dynpm_state != DYNPM_STATE_MINIMUM) { radeon_pm_compute_clocks_old()
1690 cancel_delayed_work(&rdev->pm.dynpm_idle_work); radeon_pm_compute_clocks_old()
1692 rdev->pm.dynpm_state = DYNPM_STATE_MINIMUM; radeon_pm_compute_clocks_old()
1693 rdev->pm.dynpm_planned_action = DYNPM_ACTION_MINIMUM; radeon_pm_compute_clocks_old()
1694 radeon_pm_get_dynpm_state(rdev); radeon_pm_compute_clocks_old()
1695 radeon_pm_set_clocks(rdev); radeon_pm_compute_clocks_old()
1701 mutex_unlock(&rdev->pm.mutex); radeon_pm_compute_clocks_old()
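The DYNPM branch above is a small state machine keyed on how many CRTCs are lit: more than one pauses dynamic reclocking at default clocks (reclocking under several scanning heads risks artifacts), exactly one activates it with an upclock, and zero drops the GPU to its minimum state. A hedged condensation:

    enum dynpm_state { DYNPM_DISABLED, DYNPM_PAUSED, DYNPM_ACTIVE, DYNPM_MINIMUM };
    enum dynpm_action { ACT_NONE, ACT_DEFAULT, ACT_UPCLOCK, ACT_MINIMUM };

    /* Condensed, hypothetical version of the CRTC-count branches in
     * radeon_pm_compute_clocks_old(); apply() stands in for
     * radeon_pm_get_dynpm_state() + radeon_pm_set_clocks(). */
    void dynpm_on_crtc_change(enum dynpm_state *st, int active_crtcs,
                              void (*apply)(enum dynpm_action))
    {
        if (*st == DYNPM_DISABLED)
            return;
        if (active_crtcs > 1) {
            if (*st == DYNPM_ACTIVE) {
                *st = DYNPM_PAUSED;
                apply(ACT_DEFAULT);     /* park at default clocks */
            }
        } else if (active_crtcs == 1) {
            if (*st == DYNPM_MINIMUM) {
                *st = DYNPM_ACTIVE;
                apply(ACT_UPCLOCK);
            } else if (*st == DYNPM_PAUSED) {
                *st = DYNPM_ACTIVE;     /* resume; idle work is re-armed */
            }
        } else {                        /* no heads lit */
            if (*st != DYNPM_MINIMUM) {
                *st = DYNPM_MINIMUM;
                apply(ACT_MINIMUM);
            }
        }
    }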
1704 static void radeon_pm_compute_clocks_dpm(struct radeon_device *rdev) radeon_pm_compute_clocks_dpm() argument
1706 struct drm_device *ddev = rdev->ddev; radeon_pm_compute_clocks_dpm()
1710 if (!rdev->pm.dpm_enabled) radeon_pm_compute_clocks_dpm()
1713 mutex_lock(&rdev->pm.mutex); radeon_pm_compute_clocks_dpm()
1716 rdev->pm.dpm.new_active_crtcs = 0; radeon_pm_compute_clocks_dpm()
1717 rdev->pm.dpm.new_active_crtc_count = 0; radeon_pm_compute_clocks_dpm()
1718 if (rdev->num_crtc && rdev->mode_info.mode_config_initialized) { radeon_pm_compute_clocks_dpm()
1723 rdev->pm.dpm.new_active_crtcs |= (1 << radeon_crtc->crtc_id); radeon_pm_compute_clocks_dpm()
1724 rdev->pm.dpm.new_active_crtc_count++; radeon_pm_compute_clocks_dpm()
1731 rdev->pm.dpm.ac_power = true; radeon_pm_compute_clocks_dpm()
1733 rdev->pm.dpm.ac_power = false; radeon_pm_compute_clocks_dpm()
1735 radeon_dpm_change_power_state_locked(rdev); radeon_pm_compute_clocks_dpm()
1737 mutex_unlock(&rdev->pm.mutex); radeon_pm_compute_clocks_dpm()
1741 void radeon_pm_compute_clocks(struct radeon_device *rdev) radeon_pm_compute_clocks() argument
1743 if (rdev->pm.pm_method == PM_METHOD_DPM) radeon_pm_compute_clocks()
1744 radeon_pm_compute_clocks_dpm(rdev); radeon_pm_compute_clocks()
1746 radeon_pm_compute_clocks_old(rdev); radeon_pm_compute_clocks()
1749 static bool radeon_pm_in_vbl(struct radeon_device *rdev) radeon_pm_in_vbl() argument
1757 for (crtc = 0; (crtc < rdev->num_crtc) && in_vbl; crtc++) { radeon_pm_in_vbl()
1758 if (rdev->pm.active_crtcs & (1 << crtc)) { radeon_pm_in_vbl()
1759 vbl_status = radeon_get_crtc_scanoutpos(rdev->ddev, radeon_pm_in_vbl()
1763 &rdev->mode_info.crtcs[crtc]->base.hwmode); radeon_pm_in_vbl()
1773 static bool radeon_pm_debug_check_in_vbl(struct radeon_device *rdev, bool finish) radeon_pm_debug_check_in_vbl() argument
1776 bool in_vbl = radeon_pm_in_vbl(rdev); radeon_pm_debug_check_in_vbl()
1786 struct radeon_device *rdev; radeon_dynpm_idle_work_handler() local
1788 rdev = container_of(work, struct radeon_device, radeon_dynpm_idle_work_handler()
1791 resched = ttm_bo_lock_delayed_workqueue(&rdev->mman.bdev); radeon_dynpm_idle_work_handler()
1792 mutex_lock(&rdev->pm.mutex); radeon_dynpm_idle_work_handler()
1793 if (rdev->pm.dynpm_state == DYNPM_STATE_ACTIVE) { radeon_dynpm_idle_work_handler()
1798 struct radeon_ring *ring = &rdev->ring[i]; radeon_dynpm_idle_work_handler()
1801 not_processed += radeon_fence_count_emitted(rdev, i); radeon_dynpm_idle_work_handler()
1808 if (rdev->pm.dynpm_planned_action == DYNPM_ACTION_DOWNCLOCK) { radeon_dynpm_idle_work_handler()
1809 rdev->pm.dynpm_planned_action = DYNPM_ACTION_NONE; radeon_dynpm_idle_work_handler()
1810 } else if (rdev->pm.dynpm_planned_action == DYNPM_ACTION_NONE && radeon_dynpm_idle_work_handler()
1811 rdev->pm.dynpm_can_upclock) { radeon_dynpm_idle_work_handler()
1812 rdev->pm.dynpm_planned_action = radeon_dynpm_idle_work_handler()
1814 rdev->pm.dynpm_action_timeout = jiffies + radeon_dynpm_idle_work_handler()
1818 if (rdev->pm.dynpm_planned_action == DYNPM_ACTION_UPCLOCK) { radeon_dynpm_idle_work_handler()
1819 rdev->pm.dynpm_planned_action = DYNPM_ACTION_NONE; radeon_dynpm_idle_work_handler()
1820 } else if (rdev->pm.dynpm_planned_action == DYNPM_ACTION_NONE && radeon_dynpm_idle_work_handler()
1821 rdev->pm.dynpm_can_downclock) { radeon_dynpm_idle_work_handler()
1822 rdev->pm.dynpm_planned_action = radeon_dynpm_idle_work_handler()
1824 rdev->pm.dynpm_action_timeout = jiffies + radeon_dynpm_idle_work_handler()
1832 if (rdev->pm.dynpm_planned_action != DYNPM_ACTION_NONE && radeon_dynpm_idle_work_handler()
1833 jiffies > rdev->pm.dynpm_action_timeout) { radeon_dynpm_idle_work_handler()
1834 radeon_pm_get_dynpm_state(rdev); radeon_dynpm_idle_work_handler()
1835 radeon_pm_set_clocks(rdev); radeon_dynpm_idle_work_handler()
1838 schedule_delayed_work(&rdev->pm.dynpm_idle_work, radeon_dynpm_idle_work_handler()
1841 mutex_unlock(&rdev->pm.mutex); radeon_dynpm_idle_work_handler()
1842 ttm_bo_unlock_delayed_workqueue(&rdev->mman.bdev, resched); radeon_dynpm_idle_work_handler()
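The idle worker above measures GPU load by summing fences emitted but not yet retired across all rings (radeon_fence_count_emitted()); pending work plans an upclock, an idle GPU plans a downclock, and the plan is committed only once it has survived past its timeout, after which the work rearms itself. A condensed sketch of the decision step:

    #include <stdbool.h>

    enum dynpm_act { DYNPM_NONE, DYNPM_UP, DYNPM_DOWN };

    /* Hypothetical condensation of one pass of the idle work handler;
     * apply() stands in for radeon_pm_get_dynpm_state() + set_clocks(). */
    void dynpm_idle_tick(enum dynpm_act *planned, unsigned long *deadline,
                         int not_processed, unsigned long now,
                         unsigned long grace, bool can_up, bool can_down,
                         void (*apply)(void))
    {
        if (not_processed) {                /* GPU is busy */
            if (*planned == DYNPM_DOWN)
                *planned = DYNPM_NONE;      /* cancel pending downclock */
            else if (*planned == DYNPM_NONE && can_up) {
                *planned = DYNPM_UP;
                *deadline = now + grace;
            }
        } else {                            /* GPU is idle */
            if (*planned == DYNPM_UP)
                *planned = DYNPM_NONE;      /* cancel pending upclock */
            else if (*planned == DYNPM_NONE && can_down) {
                *planned = DYNPM_DOWN;
                *deadline = now + grace;
            }
        }
        if (*planned != DYNPM_NONE && now > *deadline) {
            apply();                        /* reclock... */
            *planned = DYNPM_NONE;          /* ...and clear the plan */
        }
    }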
1854 struct radeon_device *rdev = dev->dev_private; radeon_debugfs_pm_info() local
1855 struct drm_device *ddev = rdev->ddev; radeon_debugfs_pm_info()
1857 if ((rdev->flags & RADEON_IS_PX) && radeon_debugfs_pm_info()
1860 } else if (rdev->pm.dpm_enabled) { radeon_debugfs_pm_info()
1861 mutex_lock(&rdev->pm.mutex); radeon_debugfs_pm_info()
1862 if (rdev->asic->dpm.debugfs_print_current_performance_level) radeon_debugfs_pm_info()
1863 radeon_dpm_debugfs_print_current_performance_level(rdev, m); radeon_debugfs_pm_info()
1866 mutex_unlock(&rdev->pm.mutex); radeon_debugfs_pm_info()
1868 seq_printf(m, "default engine clock: %u0 kHz\n", rdev->pm.default_sclk); radeon_debugfs_pm_info()
1870 if ((rdev->family >= CHIP_PALM) && (rdev->flags & RADEON_IS_IGP)) radeon_debugfs_pm_info()
1871 seq_printf(m, "current engine clock: %u0 kHz\n", rdev->pm.current_sclk); radeon_debugfs_pm_info()
1873 seq_printf(m, "current engine clock: %u0 kHz\n", radeon_get_engine_clock(rdev)); radeon_debugfs_pm_info()
1874 seq_printf(m, "default memory clock: %u0 kHz\n", rdev->pm.default_mclk); radeon_debugfs_pm_info()
1875 if (rdev->asic->pm.get_memory_clock) radeon_debugfs_pm_info()
1876 seq_printf(m, "current memory clock: %u0 kHz\n", radeon_get_memory_clock(rdev)); radeon_debugfs_pm_info()
1877 if (rdev->pm.current_vddc) radeon_debugfs_pm_info()
1878 seq_printf(m, "voltage: %u mV\n", rdev->pm.current_vddc); radeon_debugfs_pm_info()
1879 if (rdev->asic->pm.get_pcie_lanes) radeon_debugfs_pm_info()
1880 seq_printf(m, "PCIE lanes: %d\n", radeon_get_pcie_lanes(rdev)); radeon_debugfs_pm_info()
1891 static int radeon_debugfs_pm_init(struct radeon_device *rdev) radeon_debugfs_pm_init() argument
1894 return radeon_debugfs_add_files(rdev, radeon_pm_info_list, ARRAY_SIZE(radeon_pm_info_list)); radeon_debugfs_pm_init()
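One detail worth noting in the debugfs dump above: radeon keeps clocks in units of 10 kHz, so the "%u0 kHz" format strings are not typos; they print the raw value followed by a literal zero to render kHz. The info file itself is a standard seq_file single-shot dump; a minimal generic sketch of exposing such a dump through debugfs (names hypothetical, the real driver goes through its own radeon_debugfs_add_files() helper):

    #include <linux/debugfs.h>
    #include <linux/module.h>
    #include <linux/seq_file.h>

    static int pm_info_show(struct seq_file *m, void *unused)
    {
        unsigned int sclk_10khz = *(unsigned int *)m->private;

        /* value is in 10 kHz units; the trailing literal 0 renders kHz */
        seq_printf(m, "engine clock: %u0 kHz\n", sclk_10khz);
        return 0;
    }

    static int pm_info_open(struct inode *inode, struct file *file)
    {
        return single_open(file, pm_info_show, inode->i_private);
    }

    static const struct file_operations pm_info_fops = {
        .owner   = THIS_MODULE,
        .open    = pm_info_open,
        .read    = seq_read,
        .llseek  = seq_lseek,
        .release = single_release,
    };

    /* usage: debugfs_create_file("pm_info", 0444, parent,
     *                            &sclk_10khz, &pm_info_fops); */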
H A Drv6xx_dpm.c34 static u32 rv6xx_scale_count_given_unit(struct radeon_device *rdev,
44 static struct rv6xx_power_info *rv6xx_get_pi(struct radeon_device *rdev) rv6xx_get_pi() argument
46 struct rv6xx_power_info *pi = rdev->pm.dpm.priv; rv6xx_get_pi()
51 static void rv6xx_force_pcie_gen1(struct radeon_device *rdev) rv6xx_force_pcie_gen1() argument
64 for (i = 0; i < rdev->usec_timeout; i++) { rv6xx_force_pcie_gen1()
75 static void rv6xx_enable_pcie_gen2_support(struct radeon_device *rdev) rv6xx_enable_pcie_gen2_support() argument
88 static void rv6xx_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev, rv6xx_enable_bif_dynamic_pcie_gen2() argument
101 static void rv6xx_enable_l0s(struct radeon_device *rdev) rv6xx_enable_l0s() argument
110 static void rv6xx_enable_l1(struct radeon_device *rdev) rv6xx_enable_l1() argument
122 static void rv6xx_enable_pll_sleep_in_l1(struct radeon_device *rdev) rv6xx_enable_pll_sleep_in_l1() argument
139 static int rv6xx_convert_clock_to_stepping(struct radeon_device *rdev, rv6xx_convert_clock_to_stepping() argument
145 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, rv6xx_convert_clock_to_stepping()
160 static void rv6xx_output_stepping(struct radeon_device *rdev, rv6xx_output_stepping() argument
163 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_output_stepping()
164 u32 ref_clk = rdev->clock.spll.reference_freq; rv6xx_output_stepping()
166 u32 spll_step_count = rv6xx_scale_count_given_unit(rdev, rv6xx_output_stepping()
171 r600_engine_clock_entry_enable(rdev, step_index, true); rv6xx_output_stepping()
172 r600_engine_clock_entry_enable_pulse_skipping(rdev, step_index, false); rv6xx_output_stepping()
175 r600_engine_clock_entry_enable_post_divider(rdev, step_index, false); rv6xx_output_stepping()
180 r600_engine_clock_entry_enable_post_divider(rdev, step_index, true); rv6xx_output_stepping()
181 r600_engine_clock_entry_set_post_divider(rdev, step_index, (hi_len << 4) | lo_len); rv6xx_output_stepping()
187 r600_engine_clock_entry_set_reference_divider(rdev, step_index, rv6xx_output_stepping()
189 r600_engine_clock_entry_set_feedback_divider(rdev, step_index, fb_divider); rv6xx_output_stepping()
190 r600_engine_clock_entry_set_step_time(rdev, step_index, spll_step_count); rv6xx_output_stepping()
194 static struct rv6xx_sclk_stepping rv6xx_next_vco_step(struct radeon_device *rdev, rv6xx_next_vco_step() argument
210 static bool rv6xx_can_step_post_div(struct radeon_device *rdev, rv6xx_can_step_post_div() argument
219 static struct rv6xx_sclk_stepping rv6xx_next_post_div_step(struct radeon_device *rdev, rv6xx_next_post_div_step() argument
225 while (rv6xx_can_step_post_div(rdev, &next, target)) rv6xx_next_post_div_step()
231 static bool rv6xx_reached_stepping_target(struct radeon_device *rdev, rv6xx_reached_stepping_target() argument
240 static void rv6xx_generate_steps(struct radeon_device *rdev, rv6xx_generate_steps() argument
249 rv6xx_convert_clock_to_stepping(rdev, low, &cur); rv6xx_generate_steps()
250 rv6xx_convert_clock_to_stepping(rdev, high, &target); rv6xx_generate_steps()
252 rv6xx_output_stepping(rdev, step_index++, &cur); rv6xx_generate_steps()
262 if (rv6xx_can_step_post_div(rdev, &cur, &target)) rv6xx_generate_steps()
263 next = rv6xx_next_post_div_step(rdev, &cur, &target); rv6xx_generate_steps()
265 next = rv6xx_next_vco_step(rdev, &cur, increasing_vco, R600_VCOSTEPPCT_DFLT); rv6xx_generate_steps()
267 if (rv6xx_reached_stepping_target(rdev, &next, &target, increasing_vco)) { rv6xx_generate_steps()
269 rv6xx_next_vco_step(rdev, &target, !increasing_vco, R600_ENDINGVCOSTEPPCT_DFLT); rv6xx_generate_steps()
272 if (!rv6xx_reached_stepping_target(rdev, &tiny, &cur, !increasing_vco)) rv6xx_generate_steps()
273 rv6xx_output_stepping(rdev, step_index++, &tiny); rv6xx_generate_steps()
282 rv6xx_output_stepping(rdev, step_index++, &final_vco); rv6xx_generate_steps()
285 rv6xx_output_stepping(rdev, step_index++, &target); rv6xx_generate_steps()
288 rv6xx_output_stepping(rdev, step_index++, &next); rv6xx_generate_steps()
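rv6xx_generate_steps() above walks from the low clock toward the high one in bounded moves, preferring post-divider steps where legal and VCO percentage steps otherwise, emitting one stepping-table entry per move (with a small terminal correction near the target). A hedged skeleton of the walk, with the divider math abstracted behind callbacks:

    #include <stdbool.h>

    /* step_toward() must make strict progress toward tgt; it stands in for
     * rv6xx_next_vco_step()/rv6xx_next_post_div_step(), and reached() for
     * rv6xx_reached_stepping_target(). Hypothetical skeleton only. */
    int generate_steps(unsigned int low, unsigned int high,
                       unsigned int (*step_toward)(unsigned int cur,
                                                   unsigned int tgt),
                       bool (*reached)(unsigned int cur, unsigned int tgt),
                       void (*emit)(int index, unsigned int clk))
    {
        unsigned int cur = low;
        int index = 0;

        emit(index++, cur);             /* starting point is entry 0 */
        while (!reached(cur, high)) {
            cur = step_toward(cur, high);
            emit(index++, cur);
        }
        return index;                   /* entries written to the table */
    }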
297 static void rv6xx_generate_single_step(struct radeon_device *rdev, rv6xx_generate_single_step() argument
302 rv6xx_convert_clock_to_stepping(rdev, clock, &step); rv6xx_generate_single_step()
303 rv6xx_output_stepping(rdev, index, &step); rv6xx_generate_single_step()
306 static void rv6xx_invalidate_intermediate_steps_range(struct radeon_device *rdev, rv6xx_invalidate_intermediate_steps_range() argument
312 r600_engine_clock_entry_enable(rdev, step_index, false); rv6xx_invalidate_intermediate_steps_range()
315 static void rv6xx_set_engine_spread_spectrum_clk_s(struct radeon_device *rdev, rv6xx_set_engine_spread_spectrum_clk_s() argument
322 static void rv6xx_set_engine_spread_spectrum_clk_v(struct radeon_device *rdev, rv6xx_set_engine_spread_spectrum_clk_v() argument
329 static void rv6xx_enable_engine_spread_spectrum(struct radeon_device *rdev, rv6xx_enable_engine_spread_spectrum() argument
340 static void rv6xx_set_memory_spread_spectrum_clk_s(struct radeon_device *rdev, rv6xx_set_memory_spread_spectrum_clk_s() argument
346 static void rv6xx_set_memory_spread_spectrum_clk_v(struct radeon_device *rdev, rv6xx_set_memory_spread_spectrum_clk_v() argument
352 static void rv6xx_enable_memory_spread_spectrum(struct radeon_device *rdev, rv6xx_enable_memory_spread_spectrum() argument
361 static void rv6xx_enable_dynamic_spread_spectrum(struct radeon_device *rdev, rv6xx_enable_dynamic_spread_spectrum() argument
370 static void rv6xx_memory_clock_entry_enable_post_divider(struct radeon_device *rdev, rv6xx_memory_clock_entry_enable_post_divider() argument
380 static void rv6xx_memory_clock_entry_set_post_divider(struct radeon_device *rdev, rv6xx_memory_clock_entry_set_post_divider() argument
387 static void rv6xx_memory_clock_entry_set_feedback_divider(struct radeon_device *rdev, rv6xx_memory_clock_entry_set_feedback_divider() argument
394 static void rv6xx_memory_clock_entry_set_reference_divider(struct radeon_device *rdev, rv6xx_memory_clock_entry_set_reference_divider() argument
401 static void rv6xx_vid_response_set_brt(struct radeon_device *rdev, u32 rt) rv6xx_vid_response_set_brt() argument
406 static void rv6xx_enable_engine_feedback_and_reference_sync(struct radeon_device *rdev) rv6xx_enable_engine_feedback_and_reference_sync() argument
418 static u32 rv6xx_scale_count_given_unit(struct radeon_device *rdev, rv6xx_scale_count_given_unit() argument
426 static u32 rv6xx_compute_count_for_delay(struct radeon_device *rdev, rv6xx_compute_count_for_delay() argument
429 u32 ref_clk = rdev->clock.spll.reference_freq; rv6xx_compute_count_for_delay()
431 return rv6xx_scale_count_given_unit(rdev, delay_us * (ref_clk / 100), unit); rv6xx_compute_count_for_delay()
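The delay-to-count conversion above is unit bookkeeping: assuming the reference frequency is stored in the driver's usual 10 kHz units (27 MHz kept as 2700), ref_clk / 100 is reference-clock ticks per microsecond, so delay_us * (ref_clk / 100) is the delay in ticks before scaling. A worked helper under that assumption:

    /* Ticks for a microsecond delay, assuming ref_clk is in 10 kHz units
     * (27 MHz stored as 2700), so ref_clk / 100 == ticks per microsecond. */
    unsigned int delay_us_to_ticks(unsigned int delay_us, unsigned int ref_clk)
    {
        return delay_us * (ref_clk / 100);  /* 100 us @ 27 MHz -> 2700 ticks */
    }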
434 static void rv6xx_calculate_engine_speed_stepping_parameters(struct radeon_device *rdev, rv6xx_calculate_engine_speed_stepping_parameters() argument
437 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_calculate_engine_speed_stepping_parameters()
451 static void rv6xx_calculate_memory_clock_stepping_parameters(struct radeon_device *rdev, rv6xx_calculate_memory_clock_stepping_parameters() argument
454 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_calculate_memory_clock_stepping_parameters()
481 static void rv6xx_calculate_voltage_stepping_parameters(struct radeon_device *rdev, rv6xx_calculate_voltage_stepping_parameters() argument
484 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_calculate_voltage_stepping_parameters()
549 static void rv6xx_program_engine_spread_spectrum(struct radeon_device *rdev, rv6xx_program_engine_spread_spectrum() argument
552 u32 ref_clk = rdev->clock.spll.reference_freq; rv6xx_program_engine_spread_spectrum()
553 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_engine_spread_spectrum()
558 rv6xx_enable_engine_spread_spectrum(rdev, level, false); rv6xx_program_engine_spread_spectrum()
561 if (radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, clock, false, &dividers) == 0) { rv6xx_program_engine_spread_spectrum()
565 if (radeon_atombios_get_asic_ss_info(rdev, &ss, rv6xx_program_engine_spread_spectrum()
576 rv6xx_set_engine_spread_spectrum_clk_v(rdev, level, clk_v); rv6xx_program_engine_spread_spectrum()
577 rv6xx_set_engine_spread_spectrum_clk_s(rdev, level, clk_s); rv6xx_program_engine_spread_spectrum()
578 rv6xx_enable_engine_spread_spectrum(rdev, level, true); rv6xx_program_engine_spread_spectrum()
584 static void rv6xx_program_sclk_spread_spectrum_parameters_except_lowest_entry(struct radeon_device *rdev) rv6xx_program_sclk_spread_spectrum_parameters_except_lowest_entry() argument
586 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_sclk_spread_spectrum_parameters_except_lowest_entry()
588 rv6xx_program_engine_spread_spectrum(rdev, rv6xx_program_sclk_spread_spectrum_parameters_except_lowest_entry()
592 rv6xx_program_engine_spread_spectrum(rdev, rv6xx_program_sclk_spread_spectrum_parameters_except_lowest_entry()
598 static int rv6xx_program_mclk_stepping_entry(struct radeon_device *rdev, rv6xx_program_mclk_stepping_entry() argument
603 if (radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM, clock, false, &dividers)) rv6xx_program_mclk_stepping_entry()
607 rv6xx_memory_clock_entry_set_reference_divider(rdev, entry, dividers.ref_div); rv6xx_program_mclk_stepping_entry()
608 rv6xx_memory_clock_entry_set_feedback_divider(rdev, entry, dividers.fb_div); rv6xx_program_mclk_stepping_entry()
609 rv6xx_memory_clock_entry_set_post_divider(rdev, entry, dividers.post_div); rv6xx_program_mclk_stepping_entry()
612 rv6xx_memory_clock_entry_enable_post_divider(rdev, entry, true); rv6xx_program_mclk_stepping_entry()
614 rv6xx_memory_clock_entry_enable_post_divider(rdev, entry, false); rv6xx_program_mclk_stepping_entry()
619 static void rv6xx_program_mclk_stepping_parameters_except_lowest_entry(struct radeon_device *rdev) rv6xx_program_mclk_stepping_parameters_except_lowest_entry() argument
621 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_mclk_stepping_parameters_except_lowest_entry()
626 rv6xx_program_mclk_stepping_entry(rdev, i, rv6xx_program_mclk_stepping_parameters_except_lowest_entry()
631 static void rv6xx_find_memory_clock_with_highest_vco(struct radeon_device *rdev, rv6xx_find_memory_clock_with_highest_vco() argument
637 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_find_memory_clock_with_highest_vco()
641 if (radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM, rv6xx_find_memory_clock_with_highest_vco()
653 static void rv6xx_program_mclk_spread_spectrum_parameters(struct radeon_device *rdev) rv6xx_program_mclk_spread_spectrum_parameters() argument
655 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_mclk_spread_spectrum_parameters()
656 u32 ref_clk = rdev->clock.mpll.reference_freq; rv6xx_program_mclk_spread_spectrum_parameters()
661 rv6xx_enable_memory_spread_spectrum(rdev, false); rv6xx_program_mclk_spread_spectrum_parameters()
664 rv6xx_find_memory_clock_with_highest_vco(rdev, rv6xx_program_mclk_spread_spectrum_parameters()
670 rv6xx_find_memory_clock_with_highest_vco(rdev, rv6xx_program_mclk_spread_spectrum_parameters()
676 rv6xx_find_memory_clock_with_highest_vco(rdev, rv6xx_program_mclk_spread_spectrum_parameters()
683 if (radeon_atombios_get_asic_ss_info(rdev, &ss, rv6xx_program_mclk_spread_spectrum_parameters()
694 rv6xx_set_memory_spread_spectrum_clk_v(rdev, clk_v); rv6xx_program_mclk_spread_spectrum_parameters()
695 rv6xx_set_memory_spread_spectrum_clk_s(rdev, clk_s); rv6xx_program_mclk_spread_spectrum_parameters()
696 rv6xx_enable_memory_spread_spectrum(rdev, true); rv6xx_program_mclk_spread_spectrum_parameters()
702 static int rv6xx_program_voltage_stepping_entry(struct radeon_device *rdev, rv6xx_program_voltage_stepping_entry() argument
708 ret = radeon_atom_get_voltage_gpio_settings(rdev, voltage, rv6xx_program_voltage_stepping_entry()
714 r600_voltage_control_program_voltages(rdev, entry, set_pins); rv6xx_program_voltage_stepping_entry()
719 static void rv6xx_program_voltage_stepping_parameters_except_lowest_entry(struct radeon_device *rdev) rv6xx_program_voltage_stepping_parameters_except_lowest_entry() argument
721 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_voltage_stepping_parameters_except_lowest_entry()
725 rv6xx_program_voltage_stepping_entry(rdev, i, rv6xx_program_voltage_stepping_parameters_except_lowest_entry()
730 static void rv6xx_program_backbias_stepping_parameters_except_lowest_entry(struct radeon_device *rdev) rv6xx_program_backbias_stepping_parameters_except_lowest_entry() argument
732 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_backbias_stepping_parameters_except_lowest_entry()
745 static void rv6xx_program_sclk_spread_spectrum_parameters_lowest_entry(struct radeon_device *rdev) rv6xx_program_sclk_spread_spectrum_parameters_lowest_entry() argument
747 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_sclk_spread_spectrum_parameters_lowest_entry()
749 rv6xx_program_engine_spread_spectrum(rdev, rv6xx_program_sclk_spread_spectrum_parameters_lowest_entry()
754 static void rv6xx_program_mclk_stepping_parameters_lowest_entry(struct radeon_device *rdev) rv6xx_program_mclk_stepping_parameters_lowest_entry() argument
756 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_mclk_stepping_parameters_lowest_entry()
759 rv6xx_program_mclk_stepping_entry(rdev, 0, rv6xx_program_mclk_stepping_parameters_lowest_entry()
763 static void rv6xx_program_voltage_stepping_parameters_lowest_entry(struct radeon_device *rdev) rv6xx_program_voltage_stepping_parameters_lowest_entry() argument
765 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_voltage_stepping_parameters_lowest_entry()
767 rv6xx_program_voltage_stepping_entry(rdev, 0, rv6xx_program_voltage_stepping_parameters_lowest_entry()
772 static void rv6xx_program_backbias_stepping_parameters_lowest_entry(struct radeon_device *rdev) rv6xx_program_backbias_stepping_parameters_lowest_entry() argument
774 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_backbias_stepping_parameters_lowest_entry()
782 static u32 calculate_memory_refresh_rate(struct radeon_device *rdev, calculate_memory_refresh_rate() argument
795 static void rv6xx_program_memory_timing_parameters(struct radeon_device *rdev) rv6xx_program_memory_timing_parameters() argument
797 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_memory_timing_parameters()
809 radeon_atom_set_engine_dram_timings(rdev, high_clock, 0); rv6xx_program_memory_timing_parameters()
818 (POWERMODE0(calculate_memory_refresh_rate(rdev, rv6xx_program_memory_timing_parameters()
820 POWERMODE1(calculate_memory_refresh_rate(rdev, rv6xx_program_memory_timing_parameters()
822 POWERMODE2(calculate_memory_refresh_rate(rdev, rv6xx_program_memory_timing_parameters()
824 POWERMODE3(calculate_memory_refresh_rate(rdev, rv6xx_program_memory_timing_parameters()
829 static void rv6xx_program_mpll_timing_parameters(struct radeon_device *rdev) rv6xx_program_mpll_timing_parameters() argument
831 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_mpll_timing_parameters()
833 r600_set_mpll_lock_time(rdev, R600_MPLLLOCKTIME_DFLT * rv6xx_program_mpll_timing_parameters()
835 r600_set_mpll_reset_time(rdev, R600_MPLLRESETTIME_DFLT); rv6xx_program_mpll_timing_parameters()
838 static void rv6xx_program_bsp(struct radeon_device *rdev) rv6xx_program_bsp() argument
840 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_bsp()
841 u32 ref_clk = rdev->clock.spll.reference_freq; rv6xx_program_bsp()
848 r600_set_bsp(rdev, pi->bsu, pi->bsp); rv6xx_program_bsp()
851 static void rv6xx_program_at(struct radeon_device *rdev) rv6xx_program_at() argument
853 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_at()
855 r600_set_at(rdev, rv6xx_program_at()
862 static void rv6xx_program_git(struct radeon_device *rdev) rv6xx_program_git() argument
864 r600_set_git(rdev, R600_GICST_DFLT); rv6xx_program_git()
867 static void rv6xx_program_tp(struct radeon_device *rdev) rv6xx_program_tp() argument
872 r600_set_tc(rdev, i, r600_utc[i], r600_dtc[i]); rv6xx_program_tp()
874 r600_select_td(rdev, R600_TD_DFLT); rv6xx_program_tp()
877 static void rv6xx_program_vc(struct radeon_device *rdev) rv6xx_program_vc() argument
879 r600_set_vrc(rdev, R600_VRC_DFLT); rv6xx_program_vc()
882 static void rv6xx_clear_vc(struct radeon_device *rdev) rv6xx_clear_vc() argument
884 r600_set_vrc(rdev, 0); rv6xx_clear_vc()
887 static void rv6xx_program_tpp(struct radeon_device *rdev) rv6xx_program_tpp() argument
889 r600_set_tpu(rdev, R600_TPU_DFLT); rv6xx_program_tpp()
890 r600_set_tpc(rdev, R600_TPC_DFLT); rv6xx_program_tpp()
893 static void rv6xx_program_sstp(struct radeon_device *rdev) rv6xx_program_sstp() argument
895 r600_set_sstu(rdev, R600_SSTU_DFLT); rv6xx_program_sstp()
896 r600_set_sst(rdev, R600_SST_DFLT); rv6xx_program_sstp()
899 static void rv6xx_program_fcp(struct radeon_device *rdev) rv6xx_program_fcp() argument
901 r600_set_fctu(rdev, R600_FCTU_DFLT); rv6xx_program_fcp()
902 r600_set_fct(rdev, R600_FCT_DFLT); rv6xx_program_fcp()
905 static void rv6xx_program_vddc3d_parameters(struct radeon_device *rdev) rv6xx_program_vddc3d_parameters() argument
907 r600_set_vddc3d_oorsu(rdev, R600_VDDC3DOORSU_DFLT); rv6xx_program_vddc3d_parameters()
908 r600_set_vddc3d_oorphc(rdev, R600_VDDC3DOORPHC_DFLT); rv6xx_program_vddc3d_parameters()
909 r600_set_vddc3d_oorsdc(rdev, R600_VDDC3DOORSDC_DFLT); rv6xx_program_vddc3d_parameters()
910 r600_set_ctxcgtt3d_rphc(rdev, R600_CTXCGTT3DRPHC_DFLT); rv6xx_program_vddc3d_parameters()
911 r600_set_ctxcgtt3d_rsdc(rdev, R600_CTXCGTT3DRSDC_DFLT); rv6xx_program_vddc3d_parameters()
914 static void rv6xx_program_voltage_timing_parameters(struct radeon_device *rdev) rv6xx_program_voltage_timing_parameters() argument
918 r600_vid_rt_set_vru(rdev, R600_VRU_DFLT); rv6xx_program_voltage_timing_parameters()
920 r600_vid_rt_set_vrt(rdev, rv6xx_program_voltage_timing_parameters()
921 rv6xx_compute_count_for_delay(rdev, rv6xx_program_voltage_timing_parameters()
922 rdev->pm.dpm.voltage_response_time, rv6xx_program_voltage_timing_parameters()
925 rt = rv6xx_compute_count_for_delay(rdev, rv6xx_program_voltage_timing_parameters()
926 rdev->pm.dpm.backbias_response_time, rv6xx_program_voltage_timing_parameters()
929 rv6xx_vid_response_set_brt(rdev, (rt + 0x1F) >> 5); rv6xx_program_voltage_timing_parameters()
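The (rt + 0x1F) >> 5 above is the standard round-up-division idiom: it converts the response-time count into units of 32 ticks, rounding up so the programmed delay is never shorter than requested. The equivalent helper:

    /* DIV_ROUND_UP(rt, 32) written as an add-and-shift. */
    unsigned int brt_units_of_32(unsigned int rt)
    {
        return (rt + 0x1F) >> 5;    /* rt = 32 -> 1; rt = 33 -> 2 */
    }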
932 static void rv6xx_program_engine_speed_parameters(struct radeon_device *rdev) rv6xx_program_engine_speed_parameters() argument
934 r600_vid_rt_set_ssu(rdev, R600_SPLLSTEPUNIT_DFLT); rv6xx_program_engine_speed_parameters()
935 rv6xx_enable_engine_feedback_and_reference_sync(rdev); rv6xx_program_engine_speed_parameters()
938 static u64 rv6xx_get_master_voltage_mask(struct radeon_device *rdev) rv6xx_get_master_voltage_mask() argument
940 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_get_master_voltage_mask()
948 ret = radeon_atom_get_voltage_gpio_settings(rdev, rv6xx_get_master_voltage_mask()
960 static void rv6xx_program_voltage_gpio_pins(struct radeon_device *rdev) rv6xx_program_voltage_gpio_pins() argument
962 r600_voltage_control_enable_pins(rdev, rv6xx_program_voltage_gpio_pins()
963 rv6xx_get_master_voltage_mask(rdev)); rv6xx_program_voltage_gpio_pins()
966 static void rv6xx_enable_static_voltage_control(struct radeon_device *rdev, rv6xx_enable_static_voltage_control() argument
973 radeon_atom_set_voltage(rdev, rv6xx_enable_static_voltage_control()
977 r600_voltage_control_deactivate_static_control(rdev, rv6xx_enable_static_voltage_control()
978 rv6xx_get_master_voltage_mask(rdev)); rv6xx_enable_static_voltage_control()
981 static void rv6xx_enable_display_gap(struct radeon_device *rdev, bool enable) rv6xx_enable_display_gap() argument
997 static void rv6xx_program_power_level_enter_state(struct radeon_device *rdev) rv6xx_program_power_level_enter_state() argument
999 r600_power_level_set_enter_index(rdev, R600_POWER_LEVEL_MEDIUM); rv6xx_program_power_level_enter_state()
1019 static void rv6xx_calculate_ap(struct radeon_device *rdev, rv6xx_calculate_ap() argument
1022 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_calculate_ap()
1046 static void rv6xx_calculate_stepping_parameters(struct radeon_device *rdev, rv6xx_calculate_stepping_parameters() argument
1051 rv6xx_calculate_engine_speed_stepping_parameters(rdev, new_state); rv6xx_calculate_stepping_parameters()
1052 rv6xx_calculate_memory_clock_stepping_parameters(rdev, new_state); rv6xx_calculate_stepping_parameters()
1053 rv6xx_calculate_voltage_stepping_parameters(rdev, new_state); rv6xx_calculate_stepping_parameters()
1054 rv6xx_calculate_ap(rdev, new_state); rv6xx_calculate_stepping_parameters()
1057 static void rv6xx_program_stepping_parameters_except_lowest_entry(struct radeon_device *rdev) rv6xx_program_stepping_parameters_except_lowest_entry() argument
1059 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_stepping_parameters_except_lowest_entry()
1061 rv6xx_program_mclk_stepping_parameters_except_lowest_entry(rdev); rv6xx_program_stepping_parameters_except_lowest_entry()
1063 rv6xx_program_voltage_stepping_parameters_except_lowest_entry(rdev); rv6xx_program_stepping_parameters_except_lowest_entry()
1064 rv6xx_program_backbias_stepping_parameters_except_lowest_entry(rdev); rv6xx_program_stepping_parameters_except_lowest_entry()
1065 rv6xx_program_sclk_spread_spectrum_parameters_except_lowest_entry(rdev); rv6xx_program_stepping_parameters_except_lowest_entry()
1066 rv6xx_program_mclk_spread_spectrum_parameters(rdev); rv6xx_program_stepping_parameters_except_lowest_entry()
1067 rv6xx_program_memory_timing_parameters(rdev); rv6xx_program_stepping_parameters_except_lowest_entry()
1070 static void rv6xx_program_stepping_parameters_lowest_entry(struct radeon_device *rdev) rv6xx_program_stepping_parameters_lowest_entry() argument
1072 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_stepping_parameters_lowest_entry()
1074 rv6xx_program_mclk_stepping_parameters_lowest_entry(rdev); rv6xx_program_stepping_parameters_lowest_entry()
1076 rv6xx_program_voltage_stepping_parameters_lowest_entry(rdev); rv6xx_program_stepping_parameters_lowest_entry()
1077 rv6xx_program_backbias_stepping_parameters_lowest_entry(rdev); rv6xx_program_stepping_parameters_lowest_entry()
1078 rv6xx_program_sclk_spread_spectrum_parameters_lowest_entry(rdev); rv6xx_program_stepping_parameters_lowest_entry()
1081 static void rv6xx_program_power_level_low(struct radeon_device *rdev) rv6xx_program_power_level_low() argument
1083 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_power_level_low()
1085 r600_power_level_set_voltage_index(rdev, R600_POWER_LEVEL_LOW, rv6xx_program_power_level_low()
1087 r600_power_level_set_mem_clock_index(rdev, R600_POWER_LEVEL_LOW, rv6xx_program_power_level_low()
1089 r600_power_level_set_eng_clock_index(rdev, R600_POWER_LEVEL_LOW, rv6xx_program_power_level_low()
1091 r600_power_level_set_watermark_id(rdev, R600_POWER_LEVEL_LOW, rv6xx_program_power_level_low()
1093 r600_power_level_set_pcie_gen2(rdev, R600_POWER_LEVEL_LOW, rv6xx_program_power_level_low()
1097 static void rv6xx_program_power_level_low_to_lowest_state(struct radeon_device *rdev) rv6xx_program_power_level_low_to_lowest_state() argument
1099 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_power_level_low_to_lowest_state()
1101 r600_power_level_set_voltage_index(rdev, R600_POWER_LEVEL_LOW, 0); rv6xx_program_power_level_low_to_lowest_state()
1102 r600_power_level_set_mem_clock_index(rdev, R600_POWER_LEVEL_LOW, 0); rv6xx_program_power_level_low_to_lowest_state()
1103 r600_power_level_set_eng_clock_index(rdev, R600_POWER_LEVEL_LOW, 0); rv6xx_program_power_level_low_to_lowest_state()
1105 r600_power_level_set_watermark_id(rdev, R600_POWER_LEVEL_LOW, rv6xx_program_power_level_low_to_lowest_state()
1108 r600_power_level_set_pcie_gen2(rdev, R600_POWER_LEVEL_LOW, rv6xx_program_power_level_low_to_lowest_state()
1113 static void rv6xx_program_power_level_medium(struct radeon_device *rdev) rv6xx_program_power_level_medium() argument
1115 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_power_level_medium()
1117 r600_power_level_set_voltage_index(rdev, R600_POWER_LEVEL_MEDIUM, rv6xx_program_power_level_medium()
1119 r600_power_level_set_mem_clock_index(rdev, R600_POWER_LEVEL_MEDIUM, rv6xx_program_power_level_medium()
1121 r600_power_level_set_eng_clock_index(rdev, R600_POWER_LEVEL_MEDIUM, rv6xx_program_power_level_medium()
1123 r600_power_level_set_watermark_id(rdev, R600_POWER_LEVEL_MEDIUM, rv6xx_program_power_level_medium()
1125 r600_power_level_set_pcie_gen2(rdev, R600_POWER_LEVEL_MEDIUM, rv6xx_program_power_level_medium()
1129 static void rv6xx_program_power_level_medium_for_transition(struct radeon_device *rdev) rv6xx_program_power_level_medium_for_transition() argument
1131 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_power_level_medium_for_transition()
1133 rv6xx_program_mclk_stepping_entry(rdev, rv6xx_program_power_level_medium_for_transition()
1137 r600_power_level_set_voltage_index(rdev, R600_POWER_LEVEL_MEDIUM, 1); rv6xx_program_power_level_medium_for_transition()
1139 r600_power_level_set_mem_clock_index(rdev, R600_POWER_LEVEL_MEDIUM, rv6xx_program_power_level_medium_for_transition()
1141 r600_power_level_set_eng_clock_index(rdev, R600_POWER_LEVEL_MEDIUM, rv6xx_program_power_level_medium_for_transition()
1144 r600_power_level_set_watermark_id(rdev, R600_POWER_LEVEL_MEDIUM, rv6xx_program_power_level_medium_for_transition()
1147 rv6xx_enable_engine_spread_spectrum(rdev, R600_POWER_LEVEL_MEDIUM, false); rv6xx_program_power_level_medium_for_transition()
1149 r600_power_level_set_pcie_gen2(rdev, R600_POWER_LEVEL_MEDIUM, rv6xx_program_power_level_medium_for_transition()
1153 static void rv6xx_program_power_level_high(struct radeon_device *rdev) rv6xx_program_power_level_high() argument
1155 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_program_power_level_high()
1157 r600_power_level_set_voltage_index(rdev, R600_POWER_LEVEL_HIGH, rv6xx_program_power_level_high()
1159 r600_power_level_set_mem_clock_index(rdev, R600_POWER_LEVEL_HIGH, rv6xx_program_power_level_high()
1161 r600_power_level_set_eng_clock_index(rdev, R600_POWER_LEVEL_HIGH, rv6xx_program_power_level_high()
1164 r600_power_level_set_watermark_id(rdev, R600_POWER_LEVEL_HIGH, rv6xx_program_power_level_high()
1167 r600_power_level_set_pcie_gen2(rdev, R600_POWER_LEVEL_HIGH, rv6xx_program_power_level_high()
1171 static void rv6xx_enable_backbias(struct radeon_device *rdev, bool enable) rv6xx_enable_backbias() argument
1181 static void rv6xx_program_display_gap(struct radeon_device *rdev) rv6xx_program_display_gap() argument
1186 if (rdev->pm.dpm.new_active_crtcs & 1) { rv6xx_program_display_gap()
1189 } else if (rdev->pm.dpm.new_active_crtcs & 2) { rv6xx_program_display_gap()
1199 static void rv6xx_set_sw_voltage_to_safe(struct radeon_device *rdev, rv6xx_set_sw_voltage_to_safe() argument
1210 rv6xx_program_voltage_stepping_entry(rdev, R600_POWER_LEVEL_CTXSW, rv6xx_set_sw_voltage_to_safe()
1217 static void rv6xx_set_sw_voltage_to_low(struct radeon_device *rdev, rv6xx_set_sw_voltage_to_low() argument
1222 rv6xx_program_voltage_stepping_entry(rdev, R600_POWER_LEVEL_CTXSW, rv6xx_set_sw_voltage_to_low()
1229 static void rv6xx_set_safe_backbias(struct radeon_device *rdev, rv6xx_set_safe_backbias() argument
1243 static void rv6xx_set_safe_pcie_gen2(struct radeon_device *rdev, rv6xx_set_safe_pcie_gen2() argument
1252 rv6xx_force_pcie_gen1(rdev); rv6xx_set_safe_pcie_gen2()
1255 static void rv6xx_enable_dynamic_voltage_control(struct radeon_device *rdev, rv6xx_enable_dynamic_voltage_control() argument
1264 static void rv6xx_enable_dynamic_backbias_control(struct radeon_device *rdev, rv6xx_enable_dynamic_backbias_control() argument
1273 static int rv6xx_step_sw_voltage(struct radeon_device *rdev, rv6xx_step_sw_voltage() argument
1282 if ((radeon_atom_get_voltage_step(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, rv6xx_step_sw_voltage()
1284 (radeon_atom_round_to_true_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, rv6xx_step_sw_voltage()
1286 (radeon_atom_round_to_true_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, rv6xx_step_sw_voltage()
1297 rv6xx_program_voltage_stepping_entry(rdev, R600_POWER_LEVEL_CTXSW, rv6xx_step_sw_voltage()
1299 msleep((rdev->pm.dpm.voltage_response_time + 999) / 1000); rv6xx_step_sw_voltage()
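The msleep() conversion above rounds up as well: voltage_response_time appears to be kept in microseconds, and (x + 999) / 1000 turns it into whole milliseconds without ever sleeping short. The same idiom in isolation:

    /* Microseconds to whole milliseconds, rounded up (the units of
     * voltage_response_time are an assumption here). */
    unsigned int us_to_ms_ceil(unsigned int us)
    {
        return (us + 999) / 1000;   /* 1 -> 1, 1000 -> 1, 1001 -> 2 */
    }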
1305 static int rv6xx_step_voltage_if_increasing(struct radeon_device *rdev, rv6xx_step_voltage_if_increasing() argument
1313 return rv6xx_step_sw_voltage(rdev, rv6xx_step_voltage_if_increasing()
1320 static int rv6xx_step_voltage_if_decreasing(struct radeon_device *rdev, rv6xx_step_voltage_if_decreasing() argument
1328 return rv6xx_step_sw_voltage(rdev, rv6xx_step_voltage_if_decreasing()
1335 static void rv6xx_enable_high(struct radeon_device *rdev) rv6xx_enable_high() argument
1337 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_enable_high()
1341 r600_power_level_enable(rdev, R600_POWER_LEVEL_HIGH, true); rv6xx_enable_high()
1344 static void rv6xx_enable_medium(struct radeon_device *rdev) rv6xx_enable_medium() argument
1346 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_enable_medium()
1349 r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, true); rv6xx_enable_medium()
1352 static void rv6xx_set_dpm_event_sources(struct radeon_device *rdev, u32 sources) rv6xx_set_dpm_event_sources() argument
1354 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_set_dpm_event_sources()
1389 static void rv6xx_enable_auto_throttle_source(struct radeon_device *rdev, rv6xx_enable_auto_throttle_source() argument
1393 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_enable_auto_throttle_source()
1398 rv6xx_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources); rv6xx_enable_auto_throttle_source()
1403 rv6xx_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources); rv6xx_enable_auto_throttle_source()
1409 static void rv6xx_enable_thermal_protection(struct radeon_device *rdev, rv6xx_enable_thermal_protection() argument
1412 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_enable_thermal_protection()
1415 r600_enable_thermal_protection(rdev, enable); rv6xx_enable_thermal_protection()
1418 static void rv6xx_generate_transition_stepping(struct radeon_device *rdev, rv6xx_generate_transition_stepping() argument
1424 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_generate_transition_stepping()
1426 rv6xx_generate_steps(rdev, rv6xx_generate_transition_stepping()
1432 static void rv6xx_generate_low_step(struct radeon_device *rdev, rv6xx_generate_low_step() argument
1436 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_generate_low_step()
1439 rv6xx_generate_single_step(rdev, rv6xx_generate_low_step()
1444 static void rv6xx_invalidate_intermediate_steps(struct radeon_device *rdev) rv6xx_invalidate_intermediate_steps() argument
1446 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_invalidate_intermediate_steps()
1448 rv6xx_invalidate_intermediate_steps_range(rdev, 0, rv6xx_invalidate_intermediate_steps()
1452 static void rv6xx_generate_stepping_table(struct radeon_device *rdev, rv6xx_generate_stepping_table() argument
1456 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_generate_stepping_table()
1460 rv6xx_generate_steps(rdev, rv6xx_generate_stepping_table()
1465 rv6xx_generate_steps(rdev, rv6xx_generate_stepping_table()
1472 static void rv6xx_enable_spread_spectrum(struct radeon_device *rdev, rv6xx_enable_spread_spectrum() argument
1476 rv6xx_enable_dynamic_spread_spectrum(rdev, true); rv6xx_enable_spread_spectrum()
1478 rv6xx_enable_engine_spread_spectrum(rdev, R600_POWER_LEVEL_LOW, false); rv6xx_enable_spread_spectrum()
1479 rv6xx_enable_engine_spread_spectrum(rdev, R600_POWER_LEVEL_MEDIUM, false); rv6xx_enable_spread_spectrum()
1480 rv6xx_enable_engine_spread_spectrum(rdev, R600_POWER_LEVEL_HIGH, false); rv6xx_enable_spread_spectrum()
1481 rv6xx_enable_dynamic_spread_spectrum(rdev, false); rv6xx_enable_spread_spectrum()
1482 rv6xx_enable_memory_spread_spectrum(rdev, false); rv6xx_enable_spread_spectrum()
1486 static void rv6xx_reset_lvtm_data_sync(struct radeon_device *rdev) rv6xx_reset_lvtm_data_sync() argument
1488 if (ASIC_IS_DCE3(rdev)) rv6xx_reset_lvtm_data_sync()
1494 static void rv6xx_enable_dynamic_pcie_gen2(struct radeon_device *rdev, rv6xx_enable_dynamic_pcie_gen2() argument
1501 rv6xx_enable_bif_dynamic_pcie_gen2(rdev, true); rv6xx_enable_dynamic_pcie_gen2()
1502 rv6xx_enable_pcie_gen2_support(rdev); rv6xx_enable_dynamic_pcie_gen2()
1503 r600_enable_dynamic_pcie_gen2(rdev, true); rv6xx_enable_dynamic_pcie_gen2()
1506 rv6xx_force_pcie_gen1(rdev); rv6xx_enable_dynamic_pcie_gen2()
1507 rv6xx_enable_bif_dynamic_pcie_gen2(rdev, false); rv6xx_enable_dynamic_pcie_gen2()
1508 r600_enable_dynamic_pcie_gen2(rdev, false); rv6xx_enable_dynamic_pcie_gen2()
1512 static void rv6xx_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev, rv6xx_set_uvd_clock_before_set_eng_clock() argument
1526 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk); rv6xx_set_uvd_clock_before_set_eng_clock()
1529 static void rv6xx_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev, rv6xx_set_uvd_clock_after_set_eng_clock() argument
1543 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk); rv6xx_set_uvd_clock_after_set_eng_clock()
1546 int rv6xx_dpm_enable(struct radeon_device *rdev) rv6xx_dpm_enable() argument
1548 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_dpm_enable()
1549 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps; rv6xx_dpm_enable()
1551 if (r600_dynamicpm_enabled(rdev)) rv6xx_dpm_enable()
1554 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS) rv6xx_dpm_enable()
1555 rv6xx_enable_backbias(rdev, true); rv6xx_dpm_enable()
1558 rv6xx_enable_spread_spectrum(rdev, true); rv6xx_dpm_enable()
1560 rv6xx_program_mpll_timing_parameters(rdev); rv6xx_dpm_enable()
1561 rv6xx_program_bsp(rdev); rv6xx_dpm_enable()
1562 rv6xx_program_git(rdev); rv6xx_dpm_enable()
1563 rv6xx_program_tp(rdev); rv6xx_dpm_enable()
1564 rv6xx_program_tpp(rdev); rv6xx_dpm_enable()
1565 rv6xx_program_sstp(rdev); rv6xx_dpm_enable()
1566 rv6xx_program_fcp(rdev); rv6xx_dpm_enable()
1567 rv6xx_program_vddc3d_parameters(rdev); rv6xx_dpm_enable()
1568 rv6xx_program_voltage_timing_parameters(rdev); rv6xx_dpm_enable()
1569 rv6xx_program_engine_speed_parameters(rdev); rv6xx_dpm_enable()
1571 rv6xx_enable_display_gap(rdev, true); rv6xx_dpm_enable()
1573 rv6xx_enable_display_gap(rdev, false); rv6xx_dpm_enable()
1575 rv6xx_program_power_level_enter_state(rdev); rv6xx_dpm_enable()
1577 rv6xx_calculate_stepping_parameters(rdev, boot_ps); rv6xx_dpm_enable()
1580 rv6xx_program_voltage_gpio_pins(rdev); rv6xx_dpm_enable()
1582 rv6xx_generate_stepping_table(rdev, boot_ps); rv6xx_dpm_enable()
1584 rv6xx_program_stepping_parameters_except_lowest_entry(rdev); rv6xx_dpm_enable()
1585 rv6xx_program_stepping_parameters_lowest_entry(rdev); rv6xx_dpm_enable()
1587 rv6xx_program_power_level_low(rdev); rv6xx_dpm_enable()
1588 rv6xx_program_power_level_medium(rdev); rv6xx_dpm_enable()
1589 rv6xx_program_power_level_high(rdev); rv6xx_dpm_enable()
1590 rv6xx_program_vc(rdev); rv6xx_dpm_enable()
1591 rv6xx_program_at(rdev); rv6xx_dpm_enable()
1593 r600_power_level_enable(rdev, R600_POWER_LEVEL_LOW, true); rv6xx_dpm_enable()
1594 r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, true); rv6xx_dpm_enable()
1595 r600_power_level_enable(rdev, R600_POWER_LEVEL_HIGH, true); rv6xx_dpm_enable()
1597 rv6xx_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true); rv6xx_dpm_enable()
1599 r600_start_dpm(rdev); rv6xx_dpm_enable()
1602 rv6xx_enable_static_voltage_control(rdev, boot_ps, false); rv6xx_dpm_enable()
1605 rv6xx_enable_dynamic_pcie_gen2(rdev, boot_ps, true); rv6xx_dpm_enable()
1608 r600_gfx_clockgating_enable(rdev, true); rv6xx_dpm_enable()
1613 void rv6xx_dpm_disable(struct radeon_device *rdev) rv6xx_dpm_disable() argument
1615 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_dpm_disable()
1616 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps; rv6xx_dpm_disable()
1618 if (!r600_dynamicpm_enabled(rdev)) rv6xx_dpm_disable()
1621 r600_power_level_enable(rdev, R600_POWER_LEVEL_LOW, true); rv6xx_dpm_disable()
1622 r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, true); rv6xx_dpm_disable()
1623 rv6xx_enable_display_gap(rdev, false); rv6xx_dpm_disable()
1624 rv6xx_clear_vc(rdev); rv6xx_dpm_disable()
1625 r600_set_at(rdev, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF); rv6xx_dpm_disable()
1628 r600_enable_thermal_protection(rdev, false); rv6xx_dpm_disable()
1630 r600_wait_for_power_level(rdev, R600_POWER_LEVEL_LOW); rv6xx_dpm_disable()
1631 r600_power_level_enable(rdev, R600_POWER_LEVEL_HIGH, false); rv6xx_dpm_disable()
1632 r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, false); rv6xx_dpm_disable()
1634 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS) rv6xx_dpm_disable()
1635 rv6xx_enable_backbias(rdev, false); rv6xx_dpm_disable()
1637 rv6xx_enable_spread_spectrum(rdev, false); rv6xx_dpm_disable()
1640 rv6xx_enable_static_voltage_control(rdev, boot_ps, true); rv6xx_dpm_disable()
1643 rv6xx_enable_dynamic_pcie_gen2(rdev, boot_ps, false); rv6xx_dpm_disable()
1645 if (rdev->irq.installed && rv6xx_dpm_disable()
1646 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) { rv6xx_dpm_disable()
1647 rdev->irq.dpm_thermal = false; rv6xx_dpm_disable()
1648 radeon_irq_set(rdev); rv6xx_dpm_disable()
1652 r600_gfx_clockgating_enable(rdev, false); rv6xx_dpm_disable()
1654 r600_stop_dpm(rdev); rv6xx_dpm_disable()
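Disable mirrors enable in reverse, and its first job is to drain the hardware back to the LOW level before gating anything: LOW and MEDIUM stay enabled so the state machine has a path down, the code waits until the hardware actually reports LOW, and only then switches HIGH and MEDIUM off. A sketch of that drain step, with the register poll abstracted:

    #include <stdbool.h>

    enum power_level { LEVEL_LOW, LEVEL_MEDIUM, LEVEL_HIGH };

    /* Hypothetical drain-to-low step from rv6xx_dpm_disable();
     * level_enable()/wait_for_level() stand in for
     * r600_power_level_enable()/r600_wait_for_power_level(). */
    void drain_to_low(void (*level_enable)(enum power_level, bool),
                      void (*wait_for_level)(enum power_level))
    {
        level_enable(LEVEL_LOW, true);
        level_enable(LEVEL_MEDIUM, true);   /* keep a path down open */
        wait_for_level(LEVEL_LOW);          /* hw now at the low level */
        level_enable(LEVEL_HIGH, false);
        level_enable(LEVEL_MEDIUM, false);
    }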
1657 int rv6xx_dpm_set_power_state(struct radeon_device *rdev) rv6xx_dpm_set_power_state() argument
1659 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_dpm_set_power_state()
1660 struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps; rv6xx_dpm_set_power_state()
1661 struct radeon_ps *old_ps = rdev->pm.dpm.current_ps; rv6xx_dpm_set_power_state()
1666 rv6xx_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps); rv6xx_dpm_set_power_state()
1668 rv6xx_clear_vc(rdev); rv6xx_dpm_set_power_state()
1669 r600_power_level_enable(rdev, R600_POWER_LEVEL_LOW, true); rv6xx_dpm_set_power_state()
1670 r600_set_at(rdev, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF); rv6xx_dpm_set_power_state()
1673 r600_enable_thermal_protection(rdev, false); rv6xx_dpm_set_power_state()
1675 r600_wait_for_power_level(rdev, R600_POWER_LEVEL_LOW); rv6xx_dpm_set_power_state()
1676 r600_power_level_enable(rdev, R600_POWER_LEVEL_HIGH, false); rv6xx_dpm_set_power_state()
1677 r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, false); rv6xx_dpm_set_power_state()
1679 rv6xx_generate_transition_stepping(rdev, new_ps, old_ps); rv6xx_dpm_set_power_state()
1680 rv6xx_program_power_level_medium_for_transition(rdev); rv6xx_dpm_set_power_state()
1683 rv6xx_set_sw_voltage_to_safe(rdev, new_ps, old_ps); rv6xx_dpm_set_power_state()
1684 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC) rv6xx_dpm_set_power_state()
1685 rv6xx_set_sw_voltage_to_low(rdev, old_ps); rv6xx_dpm_set_power_state()
1688 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS) rv6xx_dpm_set_power_state()
1689 rv6xx_set_safe_backbias(rdev, new_ps, old_ps); rv6xx_dpm_set_power_state()
1692 rv6xx_set_safe_pcie_gen2(rdev, new_ps, old_ps); rv6xx_dpm_set_power_state()
1695 rv6xx_enable_dynamic_voltage_control(rdev, false); rv6xx_dpm_set_power_state()
1697 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS) rv6xx_dpm_set_power_state()
1698 rv6xx_enable_dynamic_backbias_control(rdev, false); rv6xx_dpm_set_power_state()
1701 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC) rv6xx_dpm_set_power_state()
1702 rv6xx_step_voltage_if_increasing(rdev, new_ps, old_ps); rv6xx_dpm_set_power_state()
1703 msleep((rdev->pm.dpm.voltage_response_time + 999) / 1000); rv6xx_dpm_set_power_state()
1706 r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, true); rv6xx_dpm_set_power_state()
1707 r600_power_level_enable(rdev, R600_POWER_LEVEL_LOW, false); rv6xx_dpm_set_power_state()
1708 r600_wait_for_power_level_unequal(rdev, R600_POWER_LEVEL_LOW); rv6xx_dpm_set_power_state()
1710 rv6xx_generate_low_step(rdev, new_ps); rv6xx_dpm_set_power_state()
1711 rv6xx_invalidate_intermediate_steps(rdev); rv6xx_dpm_set_power_state()
1712 rv6xx_calculate_stepping_parameters(rdev, new_ps); rv6xx_dpm_set_power_state()
1713 rv6xx_program_stepping_parameters_lowest_entry(rdev); rv6xx_dpm_set_power_state()
1714 rv6xx_program_power_level_low_to_lowest_state(rdev); rv6xx_dpm_set_power_state()
1716 r600_power_level_enable(rdev, R600_POWER_LEVEL_LOW, true); rv6xx_dpm_set_power_state()
1717 r600_wait_for_power_level(rdev, R600_POWER_LEVEL_LOW); rv6xx_dpm_set_power_state()
1718 r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, false); rv6xx_dpm_set_power_state()
1721 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC) { rv6xx_dpm_set_power_state()
1722 ret = rv6xx_step_voltage_if_decreasing(rdev, new_ps, old_ps); rv6xx_dpm_set_power_state()
1726 rv6xx_enable_dynamic_voltage_control(rdev, true); rv6xx_dpm_set_power_state()
1729 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS) rv6xx_dpm_set_power_state()
1730 rv6xx_enable_dynamic_backbias_control(rdev, true); rv6xx_dpm_set_power_state()
1733 rv6xx_enable_dynamic_pcie_gen2(rdev, new_ps, true); rv6xx_dpm_set_power_state()
1735 rv6xx_reset_lvtm_data_sync(rdev); rv6xx_dpm_set_power_state()
1737 rv6xx_generate_stepping_table(rdev, new_ps); rv6xx_dpm_set_power_state()
1738 rv6xx_program_stepping_parameters_except_lowest_entry(rdev); rv6xx_dpm_set_power_state()
1739 rv6xx_program_power_level_low(rdev); rv6xx_dpm_set_power_state()
1740 rv6xx_program_power_level_medium(rdev); rv6xx_dpm_set_power_state()
1741 rv6xx_program_power_level_high(rdev); rv6xx_dpm_set_power_state()
1742 rv6xx_enable_medium(rdev); rv6xx_dpm_set_power_state()
1743 rv6xx_enable_high(rdev); rv6xx_dpm_set_power_state()
1746 rv6xx_enable_thermal_protection(rdev, true); rv6xx_dpm_set_power_state()
1747 rv6xx_program_vc(rdev); rv6xx_dpm_set_power_state()
1748 rv6xx_program_at(rdev); rv6xx_dpm_set_power_state()
1750 rv6xx_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps); rv6xx_dpm_set_power_state()
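The delay at line 1703 in the excerpt above converts a voltage response time kept in microseconds into whole milliseconds, rounding up so the regulator always gets at least its rated settle time. A minimal standalone C sketch of that ceiling division (function and variable names are illustrative, not from the driver):

#include <stdio.h>

/* Mirrors msleep((voltage_response_time + 999) / 1000): round a
 * microsecond settle time up to whole milliseconds so it is never
 * truncated to zero. */
static unsigned int usec_to_msec_ceil(unsigned int t_us)
{
	return (t_us + 999) / 1000;
}

int main(void)
{
	printf("%u\n", usec_to_msec_ceil(1));    /* 1, never 0     */
	printf("%u\n", usec_to_msec_ceil(1000)); /* exactly 1      */
	printf("%u\n", usec_to_msec_ceil(1001)); /* rounds up to 2 */
	return 0;
}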
1755 void rv6xx_setup_asic(struct radeon_device *rdev) rv6xx_setup_asic() argument
1757 r600_enable_acpi_pm(rdev); rv6xx_setup_asic()
1760 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L0s) rv6xx_setup_asic()
1761 rv6xx_enable_l0s(rdev); rv6xx_setup_asic()
1762 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L1) rv6xx_setup_asic()
1763 rv6xx_enable_l1(rdev); rv6xx_setup_asic()
1764 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_TURNOFFPLL_ASPML1) rv6xx_setup_asic()
1765 rv6xx_enable_pll_sleep_in_l1(rdev); rv6xx_setup_asic()
1769 void rv6xx_dpm_display_configuration_changed(struct radeon_device *rdev) rv6xx_dpm_display_configuration_changed() argument
1771 rv6xx_program_display_gap(rdev); rv6xx_dpm_display_configuration_changed()
1795 static void rv6xx_parse_pplib_non_clock_info(struct radeon_device *rdev, rv6xx_parse_pplib_non_clock_info() argument
1812 rdev->pm.dpm.boot_ps = rps; rv6xx_parse_pplib_non_clock_info()
1814 rdev->pm.dpm.uvd_ps = rps; rv6xx_parse_pplib_non_clock_info()
1817 static void rv6xx_parse_pplib_clock_info(struct radeon_device *rdev, rv6xx_parse_pplib_clock_info() argument
1851 if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc) == 0) rv6xx_parse_pplib_clock_info()
1857 if ((rdev->family == CHIP_RV610) || (rdev->family == CHIP_RV630)) { rv6xx_parse_pplib_clock_info()
1866 radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd); rv6xx_parse_pplib_clock_info()
1867 pl->mclk = rdev->clock.default_mclk; rv6xx_parse_pplib_clock_info()
1868 pl->sclk = rdev->clock.default_sclk; rv6xx_parse_pplib_clock_info()
1873 static int rv6xx_parse_power_table(struct radeon_device *rdev) rv6xx_parse_power_table() argument
1875 struct radeon_mode_info *mode_info = &rdev->mode_info; rv6xx_parse_power_table()
1891 rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) * rv6xx_parse_power_table()
1893 if (!rdev->pm.dpm.ps) rv6xx_parse_power_table()
1910 kfree(rdev->pm.dpm.ps); rv6xx_parse_power_table()
1913 rdev->pm.dpm.ps[i].ps_priv = ps; rv6xx_parse_power_table()
1914 rv6xx_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], rv6xx_parse_power_table()
1922 rv6xx_parse_pplib_clock_info(rdev, rv6xx_parse_power_table()
1923 &rdev->pm.dpm.ps[i], j, rv6xx_parse_power_table()
1928 rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates; rv6xx_parse_power_table()
1932 int rv6xx_dpm_init(struct radeon_device *rdev) rv6xx_dpm_init() argument
1942 rdev->pm.dpm.priv = pi; rv6xx_dpm_init()
1944 ret = r600_get_platform_caps(rdev); rv6xx_dpm_init()
1948 ret = rv6xx_parse_power_table(rdev); rv6xx_dpm_init()
1952 if (rdev->pm.dpm.voltage_response_time == 0) rv6xx_dpm_init()
1953 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT; rv6xx_dpm_init()
1954 if (rdev->pm.dpm.backbias_response_time == 0) rv6xx_dpm_init()
1955 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT; rv6xx_dpm_init()
1957 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, rv6xx_dpm_init()
1964 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM, rv6xx_dpm_init()
1971 if (rdev->family >= CHIP_RV670) rv6xx_dpm_init()
1977 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0); rv6xx_dpm_init()
1981 pi->sclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss, rv6xx_dpm_init()
1983 pi->mclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss, rv6xx_dpm_init()
1997 (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)) rv6xx_dpm_init()
2007 void rv6xx_dpm_print_power_state(struct radeon_device *rdev, rv6xx_dpm_print_power_state() argument
2025 r600_dpm_print_ps_status(rdev, rps); rv6xx_dpm_print_power_state()
2028 void rv6xx_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev, rv6xx_dpm_debugfs_print_current_performance_level() argument
2031 struct radeon_ps *rps = rdev->pm.dpm.current_ps; rv6xx_dpm_debugfs_print_current_performance_level()
2054 u32 rv6xx_dpm_get_current_sclk(struct radeon_device *rdev) rv6xx_dpm_get_current_sclk() argument
2056 struct radeon_ps *rps = rdev->pm.dpm.current_ps; rv6xx_dpm_get_current_sclk()
2077 u32 rv6xx_dpm_get_current_mclk(struct radeon_device *rdev) rv6xx_dpm_get_current_mclk() argument
2079 struct radeon_ps *rps = rdev->pm.dpm.current_ps; rv6xx_dpm_get_current_mclk()
2099 void rv6xx_dpm_fini(struct radeon_device *rdev) rv6xx_dpm_fini() argument
2103 for (i = 0; i < rdev->pm.dpm.num_ps; i++) { rv6xx_dpm_fini()
2104 kfree(rdev->pm.dpm.ps[i].ps_priv); rv6xx_dpm_fini()
2106 kfree(rdev->pm.dpm.ps); rv6xx_dpm_fini()
2107 kfree(rdev->pm.dpm.priv); rv6xx_dpm_fini()
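rv6xx_dpm_fini() above tears down what rv6xx_parse_power_table() and rv6xx_dpm_init() allocated: one private block per power state, then the state array, then the driver-private info. A standalone C sketch of that ownership pattern, with stand-in types (struct radeon_ps and the kernel allocators are replaced by plain C equivalents):

#include <stdlib.h>

struct ps { void *ps_priv; };          /* stand-in for struct radeon_ps */
struct dpm { struct ps *ps; int num_ps; void *priv; };

/* Free in the same order as rv6xx_dpm_fini(): per-state private
 * data first, then the state array, then the driver-private block. */
static void dpm_fini(struct dpm *d)
{
	for (int i = 0; i < d->num_ps; i++)
		free(d->ps[i].ps_priv);
	free(d->ps);
	free(d->priv);
}

int main(void)
{
	struct dpm d = { calloc(2, sizeof(struct ps)), 2, malloc(16) };

	d.ps[0].ps_priv = malloc(8);
	d.ps[1].ps_priv = malloc(8);
	dpm_fini(&d);
	return 0;
}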
2110 u32 rv6xx_dpm_get_sclk(struct radeon_device *rdev, bool low) rv6xx_dpm_get_sclk() argument
2112 struct rv6xx_ps *requested_state = rv6xx_get_ps(rdev->pm.dpm.requested_ps); rv6xx_dpm_get_sclk()
2120 u32 rv6xx_dpm_get_mclk(struct radeon_device *rdev, bool low) rv6xx_dpm_get_mclk() argument
2122 struct rv6xx_ps *requested_state = rv6xx_get_ps(rdev->pm.dpm.requested_ps); rv6xx_dpm_get_mclk()
2130 int rv6xx_dpm_force_performance_level(struct radeon_device *rdev, rv6xx_dpm_force_performance_level() argument
2133 struct rv6xx_power_info *pi = rv6xx_get_pi(rdev); rv6xx_dpm_force_performance_level()
2143 rv6xx_clear_vc(rdev); rv6xx_dpm_force_performance_level()
2144 r600_power_level_enable(rdev, R600_POWER_LEVEL_LOW, true); rv6xx_dpm_force_performance_level()
2145 r600_set_at(rdev, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF); rv6xx_dpm_force_performance_level()
2146 r600_wait_for_power_level(rdev, R600_POWER_LEVEL_LOW); rv6xx_dpm_force_performance_level()
2147 r600_power_level_enable(rdev, R600_POWER_LEVEL_HIGH, false); rv6xx_dpm_force_performance_level()
2148 r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, false); rv6xx_dpm_force_performance_level()
2149 rv6xx_enable_medium(rdev); rv6xx_dpm_force_performance_level()
2150 rv6xx_enable_high(rdev); rv6xx_dpm_force_performance_level()
2152 r600_power_level_enable(rdev, R600_POWER_LEVEL_LOW, false); rv6xx_dpm_force_performance_level()
2153 rv6xx_program_vc(rdev); rv6xx_dpm_force_performance_level()
2154 rv6xx_program_at(rdev); rv6xx_dpm_force_performance_level()
2156 rdev->pm.dpm.forced_level = level; rv6xx_dpm_force_performance_level()
radeon_gart.c 58 * @rdev: radeon_device pointer
65 int radeon_gart_table_ram_alloc(struct radeon_device *rdev) radeon_gart_table_ram_alloc() argument
69 ptr = pci_alloc_consistent(rdev->pdev, rdev->gart.table_size, radeon_gart_table_ram_alloc()
70 &rdev->gart.table_addr); radeon_gart_table_ram_alloc()
75 if (rdev->family == CHIP_RS400 || rdev->family == CHIP_RS480 || radeon_gart_table_ram_alloc()
76 rdev->family == CHIP_RS690 || rdev->family == CHIP_RS740) { radeon_gart_table_ram_alloc()
78 rdev->gart.table_size >> PAGE_SHIFT); radeon_gart_table_ram_alloc()
81 rdev->gart.ptr = ptr; radeon_gart_table_ram_alloc()
82 memset((void *)rdev->gart.ptr, 0, rdev->gart.table_size); radeon_gart_table_ram_alloc()
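pci_alloc_consistent() above returns a CPU virtual address and fills in a bus address (gart.table_addr) that the GPU is later programmed with; on the RS400/RS480/RS690/RS740 IGPs the table is additionally remapped uncached because the GPU reads it straight out of system RAM. A hedged kernel-style sketch of the same allocation with the current DMA API (not a drop-in patch; the surrounding variables are assumed):

	void *table;
	dma_addr_t table_addr;

	/* One call yields both views of the buffer: 'table' for the
	 * CPU, 'table_addr' for the GPU's page-table base register.
	 * On recent kernels the memory comes back already zeroed. */
	table = dma_alloc_coherent(&pdev->dev, table_size, &table_addr,
				   GFP_KERNEL);
	if (!table)
		return -ENOMEM;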
89 * @rdev: radeon_device pointer
95 void radeon_gart_table_ram_free(struct radeon_device *rdev) radeon_gart_table_ram_free() argument
97 if (rdev->gart.ptr == NULL) { radeon_gart_table_ram_free()
101 if (rdev->family == CHIP_RS400 || rdev->family == CHIP_RS480 || radeon_gart_table_ram_free()
102 rdev->family == CHIP_RS690 || rdev->family == CHIP_RS740) { radeon_gart_table_ram_free()
103 set_memory_wb((unsigned long)rdev->gart.ptr, radeon_gart_table_ram_free()
104 rdev->gart.table_size >> PAGE_SHIFT); radeon_gart_table_ram_free()
107 pci_free_consistent(rdev->pdev, rdev->gart.table_size, radeon_gart_table_ram_free()
108 (void *)rdev->gart.ptr, radeon_gart_table_ram_free()
109 rdev->gart.table_addr); radeon_gart_table_ram_free()
110 rdev->gart.ptr = NULL; radeon_gart_table_ram_free()
111 rdev->gart.table_addr = 0; radeon_gart_table_ram_free()
117 * @rdev: radeon_device pointer
124 int radeon_gart_table_vram_alloc(struct radeon_device *rdev) radeon_gart_table_vram_alloc() argument
128 if (rdev->gart.robj == NULL) { radeon_gart_table_vram_alloc()
129 r = radeon_bo_create(rdev, rdev->gart.table_size, radeon_gart_table_vram_alloc()
131 0, NULL, NULL, &rdev->gart.robj); radeon_gart_table_vram_alloc()
142 * @rdev: radeon_device pointer
149 int radeon_gart_table_vram_pin(struct radeon_device *rdev) radeon_gart_table_vram_pin() argument
154 r = radeon_bo_reserve(rdev->gart.robj, false); radeon_gart_table_vram_pin()
157 r = radeon_bo_pin(rdev->gart.robj, radeon_gart_table_vram_pin()
160 radeon_bo_unreserve(rdev->gart.robj); radeon_gart_table_vram_pin()
163 r = radeon_bo_kmap(rdev->gart.robj, &rdev->gart.ptr); radeon_gart_table_vram_pin()
165 radeon_bo_unpin(rdev->gart.robj); radeon_gart_table_vram_pin()
166 radeon_bo_unreserve(rdev->gart.robj); radeon_gart_table_vram_pin()
167 rdev->gart.table_addr = gpu_addr; radeon_gart_table_vram_pin()
175 for (i = 0; i < rdev->gart.num_gpu_pages; i++) radeon_gart_table_vram_pin()
176 radeon_gart_set_page(rdev, i, rdev->gart.pages_entry[i]); radeon_gart_table_vram_pin()
178 radeon_gart_tlb_flush(rdev); radeon_gart_table_vram_pin()
187 * @rdev: radeon_device pointer
192 void radeon_gart_table_vram_unpin(struct radeon_device *rdev) radeon_gart_table_vram_unpin() argument
196 if (rdev->gart.robj == NULL) { radeon_gart_table_vram_unpin()
199 r = radeon_bo_reserve(rdev->gart.robj, false); radeon_gart_table_vram_unpin()
201 radeon_bo_kunmap(rdev->gart.robj); radeon_gart_table_vram_unpin()
202 radeon_bo_unpin(rdev->gart.robj); radeon_gart_table_vram_unpin()
203 radeon_bo_unreserve(rdev->gart.robj); radeon_gart_table_vram_unpin()
204 rdev->gart.ptr = NULL; radeon_gart_table_vram_unpin()
211 * @rdev: radeon_device pointer
217 void radeon_gart_table_vram_free(struct radeon_device *rdev) radeon_gart_table_vram_free() argument
219 if (rdev->gart.robj == NULL) { radeon_gart_table_vram_free()
222 radeon_bo_unref(&rdev->gart.robj); radeon_gart_table_vram_free()
231 * @rdev: radeon_device pointer
238 void radeon_gart_unbind(struct radeon_device *rdev, unsigned offset, radeon_gart_unbind() argument
245 if (!rdev->gart.ready) { radeon_gart_unbind()
252 if (rdev->gart.pages[p]) { radeon_gart_unbind()
253 rdev->gart.pages[p] = NULL; radeon_gart_unbind()
255 rdev->gart.pages_entry[t] = rdev->dummy_page.entry; radeon_gart_unbind()
256 if (rdev->gart.ptr) { radeon_gart_unbind()
257 radeon_gart_set_page(rdev, t, radeon_gart_unbind()
258 rdev->dummy_page.entry); radeon_gart_unbind()
263 if (rdev->gart.ptr) { radeon_gart_unbind()
265 radeon_gart_tlb_flush(rdev); radeon_gart_unbind()
272 * @rdev: radeon_device pointer
283 int radeon_gart_bind(struct radeon_device *rdev, unsigned offset, radeon_gart_bind() argument
292 if (!rdev->gart.ready) { radeon_gart_bind()
300 rdev->gart.pages[p] = pagelist[i]; radeon_gart_bind()
304 rdev->gart.pages_entry[t] = page_entry; radeon_gart_bind()
305 if (rdev->gart.ptr) { radeon_gart_bind()
306 radeon_gart_set_page(rdev, t, page_entry); radeon_gart_bind()
311 if (rdev->gart.ptr) { radeon_gart_bind()
313 radeon_gart_tlb_flush(rdev); radeon_gart_bind()
321 * @rdev: radeon_device pointer
326 int radeon_gart_init(struct radeon_device *rdev) radeon_gart_init() argument
330 if (rdev->gart.pages) { radeon_gart_init()
338 r = radeon_dummy_page_init(rdev); radeon_gart_init()
342 rdev->gart.num_cpu_pages = rdev->mc.gtt_size / PAGE_SIZE; radeon_gart_init()
343 rdev->gart.num_gpu_pages = rdev->mc.gtt_size / RADEON_GPU_PAGE_SIZE; radeon_gart_init()
345 rdev->gart.num_cpu_pages, rdev->gart.num_gpu_pages); radeon_gart_init()
347 rdev->gart.pages = vzalloc(sizeof(void *) * rdev->gart.num_cpu_pages); radeon_gart_init()
348 if (rdev->gart.pages == NULL) { radeon_gart_init()
349 radeon_gart_fini(rdev); radeon_gart_init()
352 rdev->gart.pages_entry = vmalloc(sizeof(uint64_t) * radeon_gart_init()
353 rdev->gart.num_gpu_pages); radeon_gart_init()
354 if (rdev->gart.pages_entry == NULL) { radeon_gart_init()
355 radeon_gart_fini(rdev); radeon_gart_init()
359 for (i = 0; i < rdev->gart.num_gpu_pages; i++) radeon_gart_init()
360 rdev->gart.pages_entry[i] = rdev->dummy_page.entry; radeon_gart_init()
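radeon_gart_init() and radeon_gart_bind()/unbind() above keep two parallel page counts: .pages is indexed by CPU-sized pages (PAGE_SIZE) while .pages_entry is indexed by 4K GPU pages (RADEON_GPU_PAGE_SIZE), so one CPU page can back several GART entries on large-page architectures. A standalone C demo of the index math (page sizes chosen for illustration):

#include <stdio.h>

#define PAGE_SIZE            65536u /* e.g. a 64K-page ppc64 kernel */
#define RADEON_GPU_PAGE_SIZE  4096u /* GART entries are always 4K   */

int main(void)
{
	unsigned offset = 3u * PAGE_SIZE;           /* third CPU page     */
	unsigned p = offset / PAGE_SIZE;            /* index into .pages  */
	unsigned t = offset / RADEON_GPU_PAGE_SIZE; /* .pages_entry index */

	printf("p=%u t=%u entries per CPU page=%u\n",
	       p, t, PAGE_SIZE / RADEON_GPU_PAGE_SIZE);
	return 0;
}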
367 * @rdev: radeon_device pointer
371 void radeon_gart_fini(struct radeon_device *rdev) radeon_gart_fini() argument
373 if (rdev->gart.ready) { radeon_gart_fini()
375 radeon_gart_unbind(rdev, 0, rdev->gart.num_cpu_pages); radeon_gart_fini()
377 rdev->gart.ready = false; radeon_gart_fini()
378 vfree(rdev->gart.pages); radeon_gart_fini()
379 vfree(rdev->gart.pages_entry); radeon_gart_fini()
380 rdev->gart.pages = NULL; radeon_gart_fini()
381 rdev->gart.pages_entry = NULL; radeon_gart_fini()
383 radeon_dummy_page_fini(rdev); radeon_gart_fini()
rs400.c 36 static int rs400_debugfs_pcie_gart_info_init(struct radeon_device *rdev);
38 void rs400_gart_adjust_size(struct radeon_device *rdev) rs400_gart_adjust_size() argument
41 switch (rdev->mc.gtt_size/(1024*1024)) { rs400_gart_adjust_size()
52 (unsigned)(rdev->mc.gtt_size >> 20)); rs400_gart_adjust_size()
55 rdev->mc.gtt_size = 32 * 1024 * 1024; rs400_gart_adjust_size()
60 void rs400_gart_tlb_flush(struct radeon_device *rdev) rs400_gart_tlb_flush() argument
63 unsigned int timeout = rdev->usec_timeout; rs400_gart_tlb_flush()
76 int rs400_gart_init(struct radeon_device *rdev) rs400_gart_init() argument
80 if (rdev->gart.ptr) { rs400_gart_init()
85 switch(rdev->mc.gtt_size / (1024 * 1024)) { rs400_gart_init()
98 r = radeon_gart_init(rdev); rs400_gart_init()
101 if (rs400_debugfs_pcie_gart_info_init(rdev)) rs400_gart_init()
103 rdev->gart.table_size = rdev->gart.num_gpu_pages * 4; rs400_gart_init()
104 return radeon_gart_table_ram_alloc(rdev); rs400_gart_init()
107 int rs400_gart_enable(struct radeon_device *rdev) rs400_gart_enable() argument
116 switch(rdev->mc.gtt_size / (1024 * 1024)) { rs400_gart_enable()
142 if (rdev->family == CHIP_RS690 || (rdev->family == CHIP_RS740)) { rs400_gart_enable()
149 tmp = REG_SET(RS690_MC_AGP_TOP, rdev->mc.gtt_end >> 16); rs400_gart_enable()
150 tmp |= REG_SET(RS690_MC_AGP_START, rdev->mc.gtt_start >> 16); rs400_gart_enable()
151 if ((rdev->family == CHIP_RS690) || (rdev->family == CHIP_RS740)) { rs400_gart_enable()
161 tmp = (u32)rdev->gart.table_addr & 0xfffff000; rs400_gart_enable()
162 tmp |= (upper_32_bits(rdev->gart.table_addr) & 0xff) << 4; rs400_gart_enable()
175 if ((rdev->family == CHIP_RS690) || (rdev->family == CHIP_RS740)) { rs400_gart_enable()
186 rs400_gart_tlb_flush(rdev); rs400_gart_enable()
188 (unsigned)(rdev->mc.gtt_size >> 20), rs400_gart_enable()
189 (unsigned long long)rdev->gart.table_addr); rs400_gart_enable()
190 rdev->gart.ready = true; rs400_gart_enable()
194 void rs400_gart_disable(struct radeon_device *rdev) rs400_gart_disable() argument
204 void rs400_gart_fini(struct radeon_device *rdev) rs400_gart_fini() argument
206 radeon_gart_fini(rdev); rs400_gart_fini()
207 rs400_gart_disable(rdev); rs400_gart_fini()
208 radeon_gart_table_ram_free(rdev); rs400_gart_fini()
230 void rs400_gart_set_page(struct radeon_device *rdev, unsigned i, rs400_gart_set_page() argument
233 u32 *gtt = rdev->gart.ptr; rs400_gart_set_page()
237 int rs400_mc_wait_for_idle(struct radeon_device *rdev) rs400_mc_wait_for_idle() argument
242 for (i = 0; i < rdev->usec_timeout; i++) { rs400_mc_wait_for_idle()
253 static void rs400_gpu_init(struct radeon_device *rdev) rs400_gpu_init() argument
256 r420_pipes_init(rdev); rs400_gpu_init()
257 if (rs400_mc_wait_for_idle(rdev)) { rs400_gpu_init()
263 static void rs400_mc_init(struct radeon_device *rdev) rs400_mc_init() argument
267 rs400_gart_adjust_size(rdev); rs400_mc_init()
268 rdev->mc.igp_sideport_enabled = radeon_combios_sideport_present(rdev); rs400_mc_init()
270 rdev->mc.vram_is_ddr = true; rs400_mc_init()
271 rdev->mc.vram_width = 128; rs400_mc_init()
272 r100_vram_init_sizes(rdev); rs400_mc_init()
274 radeon_vram_location(rdev, &rdev->mc, base); rs400_mc_init()
275 rdev->mc.gtt_base_align = rdev->mc.gtt_size - 1; rs400_mc_init()
276 radeon_gtt_location(rdev, &rdev->mc); rs400_mc_init()
277 radeon_update_bandwidth_info(rdev); rs400_mc_init()
280 uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg) rs400_mc_rreg() argument
285 spin_lock_irqsave(&rdev->mc_idx_lock, flags); rs400_mc_rreg()
289 spin_unlock_irqrestore(&rdev->mc_idx_lock, flags); rs400_mc_rreg()
293 void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) rs400_mc_wreg() argument
297 spin_lock_irqsave(&rdev->mc_idx_lock, flags); rs400_mc_wreg()
301 spin_unlock_irqrestore(&rdev->mc_idx_lock, flags); rs400_mc_wreg()
309 struct radeon_device *rdev = dev->dev_private; rs400_debugfs_gart_info() local
318 if (rdev->family == CHIP_RS690 || (rdev->family == CHIP_RS740)) { rs400_debugfs_gart_info()
379 static int rs400_debugfs_pcie_gart_info_init(struct radeon_device *rdev) rs400_debugfs_pcie_gart_info_init() argument
382 return radeon_debugfs_add_files(rdev, rs400_gart_info_list, 1); rs400_debugfs_pcie_gart_info_init()
388 static void rs400_mc_program(struct radeon_device *rdev) rs400_mc_program() argument
393 r100_mc_stop(rdev, &save); rs400_mc_program()
396 if (rs400_mc_wait_for_idle(rdev)) rs400_mc_program()
397 dev_warn(rdev->dev, "rs400: Wait for MC idle timed out before updating MC.\n"); rs400_mc_program()
399 S_000148_MC_FB_START(rdev->mc.vram_start >> 16) | rs400_mc_program()
400 S_000148_MC_FB_TOP(rdev->mc.vram_end >> 16)); rs400_mc_program()
402 r100_mc_resume(rdev, &save); rs400_mc_program()
405 static int rs400_startup(struct radeon_device *rdev) rs400_startup() argument
409 r100_set_common_regs(rdev); rs400_startup()
411 rs400_mc_program(rdev); rs400_startup()
413 r300_clock_startup(rdev); rs400_startup()
415 rs400_gpu_init(rdev); rs400_startup()
416 r100_enable_bm(rdev); rs400_startup()
419 r = rs400_gart_enable(rdev); rs400_startup()
424 r = radeon_wb_init(rdev); rs400_startup()
428 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); rs400_startup()
430 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); rs400_startup()
435 if (!rdev->irq.installed) { rs400_startup()
436 r = radeon_irq_kms_init(rdev); rs400_startup()
441 r100_irq_set(rdev); rs400_startup()
442 rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL); rs400_startup()
444 r = r100_cp_init(rdev, 1024 * 1024); rs400_startup()
446 dev_err(rdev->dev, "failed initializing CP (%d).\n", r); rs400_startup()
450 r = radeon_ib_pool_init(rdev); rs400_startup()
452 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); rs400_startup()
459 int rs400_resume(struct radeon_device *rdev) rs400_resume() argument
464 rs400_gart_disable(rdev); rs400_resume()
466 r300_clock_startup(rdev); rs400_resume()
468 rs400_mc_program(rdev); rs400_resume()
470 if (radeon_asic_reset(rdev)) { rs400_resume()
471 dev_warn(rdev->dev, "GPU reset failed! (0xE40=0x%08X, 0x7C0=0x%08X)\n", rs400_resume()
476 radeon_combios_asic_init(rdev->ddev); rs400_resume()
478 r300_clock_startup(rdev); rs400_resume()
480 radeon_surface_init(rdev); rs400_resume()
482 rdev->accel_working = true; rs400_resume()
483 r = rs400_startup(rdev); rs400_resume()
485 rdev->accel_working = false; rs400_resume()
490 int rs400_suspend(struct radeon_device *rdev) rs400_suspend() argument
492 radeon_pm_suspend(rdev); rs400_suspend()
493 r100_cp_disable(rdev); rs400_suspend()
494 radeon_wb_disable(rdev); rs400_suspend()
495 r100_irq_disable(rdev); rs400_suspend()
496 rs400_gart_disable(rdev); rs400_suspend()
500 void rs400_fini(struct radeon_device *rdev) rs400_fini() argument
502 radeon_pm_fini(rdev); rs400_fini()
503 r100_cp_fini(rdev); rs400_fini()
504 radeon_wb_fini(rdev); rs400_fini()
505 radeon_ib_pool_fini(rdev); rs400_fini()
506 radeon_gem_fini(rdev); rs400_fini()
507 rs400_gart_fini(rdev); rs400_fini()
508 radeon_irq_kms_fini(rdev); rs400_fini()
509 radeon_fence_driver_fini(rdev); rs400_fini()
510 radeon_bo_fini(rdev); rs400_fini()
511 radeon_atombios_fini(rdev); rs400_fini()
512 kfree(rdev->bios); rs400_fini()
513 rdev->bios = NULL; rs400_fini()
516 int rs400_init(struct radeon_device *rdev) rs400_init() argument
521 r100_vga_render_disable(rdev); rs400_init()
523 radeon_scratch_init(rdev); rs400_init()
525 radeon_surface_init(rdev); rs400_init()
528 r100_restore_sanity(rdev); rs400_init()
530 if (!radeon_get_bios(rdev)) { rs400_init()
531 if (ASIC_IS_AVIVO(rdev)) rs400_init()
534 if (rdev->is_atom_bios) { rs400_init()
535 dev_err(rdev->dev, "Expecting combios for RS400/RS480 GPU\n"); rs400_init()
538 r = radeon_combios_init(rdev); rs400_init()
543 if (radeon_asic_reset(rdev)) { rs400_init()
544 dev_warn(rdev->dev, rs400_init()
550 if (radeon_boot_test_post_card(rdev) == false) rs400_init()
554 radeon_get_clock_info(rdev->ddev); rs400_init()
556 rs400_mc_init(rdev); rs400_init()
558 r = radeon_fence_driver_init(rdev); rs400_init()
562 r = radeon_bo_init(rdev); rs400_init()
565 r = rs400_gart_init(rdev); rs400_init()
568 r300_set_reg_safe(rdev); rs400_init()
571 radeon_pm_init(rdev); rs400_init()
573 rdev->accel_working = true; rs400_init()
574 r = rs400_startup(rdev); rs400_init()
577 dev_err(rdev->dev, "Disabling GPU acceleration\n"); rs400_init()
578 r100_cp_fini(rdev); rs400_init()
579 radeon_wb_fini(rdev); rs400_init()
580 radeon_ib_pool_fini(rdev); rs400_init()
581 rs400_gart_fini(rdev); rs400_init()
582 radeon_irq_kms_fini(rdev); rs400_init()
583 rdev->accel_working = false; rs400_init()
radeon_irq_kms.c 51 struct radeon_device *rdev = dev->dev_private; radeon_driver_irq_handler_kms() local
54 ret = radeon_irq_process(rdev); radeon_driver_irq_handler_kms()
76 struct radeon_device *rdev = container_of(work, struct radeon_device, radeon_hotplug_work_func() local
78 struct drm_device *dev = rdev->ddev; radeon_hotplug_work_func()
84 if (!rdev->mode_info.mode_config_initialized) radeon_hotplug_work_func()
99 struct radeon_device *rdev = container_of(work, struct radeon_device, radeon_dp_work_func() local
101 struct drm_device *dev = rdev->ddev; radeon_dp_work_func()
121 struct radeon_device *rdev = dev->dev_private; radeon_driver_irq_preinstall_kms() local
125 spin_lock_irqsave(&rdev->irq.lock, irqflags); radeon_driver_irq_preinstall_kms()
128 atomic_set(&rdev->irq.ring_int[i], 0); radeon_driver_irq_preinstall_kms()
129 rdev->irq.dpm_thermal = false; radeon_driver_irq_preinstall_kms()
131 rdev->irq.hpd[i] = false; radeon_driver_irq_preinstall_kms()
133 rdev->irq.crtc_vblank_int[i] = false; radeon_driver_irq_preinstall_kms()
134 atomic_set(&rdev->irq.pflip[i], 0); radeon_driver_irq_preinstall_kms()
135 rdev->irq.afmt[i] = false; radeon_driver_irq_preinstall_kms()
137 radeon_irq_set(rdev); radeon_driver_irq_preinstall_kms()
138 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); radeon_driver_irq_preinstall_kms()
140 radeon_irq_process(rdev); radeon_driver_irq_preinstall_kms()
153 struct radeon_device *rdev = dev->dev_private; radeon_driver_irq_postinstall_kms() local
155 if (ASIC_IS_AVIVO(rdev)) radeon_driver_irq_postinstall_kms()
172 struct radeon_device *rdev = dev->dev_private; radeon_driver_irq_uninstall_kms() local
176 if (rdev == NULL) { radeon_driver_irq_uninstall_kms()
179 spin_lock_irqsave(&rdev->irq.lock, irqflags); radeon_driver_irq_uninstall_kms()
182 atomic_set(&rdev->irq.ring_int[i], 0); radeon_driver_irq_uninstall_kms()
183 rdev->irq.dpm_thermal = false; radeon_driver_irq_uninstall_kms()
185 rdev->irq.hpd[i] = false; radeon_driver_irq_uninstall_kms()
187 rdev->irq.crtc_vblank_int[i] = false; radeon_driver_irq_uninstall_kms()
188 atomic_set(&rdev->irq.pflip[i], 0); radeon_driver_irq_uninstall_kms()
189 rdev->irq.afmt[i] = false; radeon_driver_irq_uninstall_kms()
191 radeon_irq_set(rdev); radeon_driver_irq_uninstall_kms()
192 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); radeon_driver_irq_uninstall_kms()
198 * @rdev: radeon device pointer
205 static bool radeon_msi_ok(struct radeon_device *rdev) radeon_msi_ok() argument
208 if (rdev->family < CHIP_RV380) radeon_msi_ok()
212 if (rdev->flags & RADEON_IS_AGP) radeon_msi_ok()
220 if (rdev->family < CHIP_BONAIRE) { radeon_msi_ok()
221 dev_info(rdev->dev, "radeon: MSI limited to 32-bit\n"); radeon_msi_ok()
222 rdev->pdev->no_64bit_msi = 1; radeon_msi_ok()
233 if ((rdev->pdev->device == 0x791f) && radeon_msi_ok()
234 (rdev->pdev->subsystem_vendor == 0x103c) && radeon_msi_ok()
235 (rdev->pdev->subsystem_device == 0x30c2)) radeon_msi_ok()
239 if ((rdev->pdev->device == 0x791f) && radeon_msi_ok()
240 (rdev->pdev->subsystem_vendor == 0x1028) && radeon_msi_ok()
241 (rdev->pdev->subsystem_device == 0x01fc)) radeon_msi_ok()
245 if ((rdev->pdev->device == 0x791f) && radeon_msi_ok()
246 (rdev->pdev->subsystem_vendor == 0x1028) && radeon_msi_ok()
247 (rdev->pdev->subsystem_device == 0x01fd)) radeon_msi_ok()
251 if ((rdev->pdev->device == 0x791f) && radeon_msi_ok()
252 (rdev->pdev->subsystem_vendor == 0x107b) && radeon_msi_ok()
253 (rdev->pdev->subsystem_device == 0x0185)) radeon_msi_ok()
257 if (rdev->family == CHIP_RS690) radeon_msi_ok()
264 if (rdev->family == CHIP_RV515) radeon_msi_ok()
266 if (rdev->flags & RADEON_IS_IGP) { radeon_msi_ok()
268 if (rdev->family >= CHIP_PALM) radeon_msi_ok()
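radeon_msi_ok() above is a quirk gate: broad family checks first, then a deny list of specific boards whose chipsets deliver broken MSIs, matched by PCI device and subsystem IDs. A hypothetical condensed form of that deny list as a standalone C table lookup (the ID triples are copied from the excerpt; the board annotations are guesses):

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

struct msi_quirk { unsigned short device, sub_vendor, sub_device; };

static const struct msi_quirk deny[] = {
	{ 0x791f, 0x103c, 0x30c2 }, /* RS690-based system */
	{ 0x791f, 0x1028, 0x01fc }, /* RS690-based system */
	{ 0x791f, 0x1028, 0x01fd }, /* RS690-based system */
	{ 0x791f, 0x107b, 0x0185 }, /* RS690-based system */
};

static bool msi_denied(unsigned short dev, unsigned short sv,
		       unsigned short sd)
{
	for (size_t i = 0; i < sizeof(deny) / sizeof(deny[0]); i++)
		if (deny[i].device == dev && deny[i].sub_vendor == sv &&
		    deny[i].sub_device == sd)
			return true;
	return false;
}

int main(void)
{
	printf("%d\n", msi_denied(0x791f, 0x103c, 0x30c2)); /* 1 */
	printf("%d\n", msi_denied(0x791f, 0x103c, 0x0000)); /* 0 */
	return 0;
}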
280 * @rdev: radeon device pointer
285 int radeon_irq_kms_init(struct radeon_device *rdev) radeon_irq_kms_init() argument
289 spin_lock_init(&rdev->irq.lock); radeon_irq_kms_init()
290 r = drm_vblank_init(rdev->ddev, rdev->num_crtc); radeon_irq_kms_init()
295 rdev->msi_enabled = 0; radeon_irq_kms_init()
297 if (radeon_msi_ok(rdev)) { radeon_irq_kms_init()
298 int ret = pci_enable_msi(rdev->pdev); radeon_irq_kms_init()
300 rdev->msi_enabled = 1; radeon_irq_kms_init()
301 dev_info(rdev->dev, "radeon: using MSI.\n"); radeon_irq_kms_init()
305 INIT_DELAYED_WORK(&rdev->hotplug_work, radeon_hotplug_work_func); radeon_irq_kms_init()
306 INIT_WORK(&rdev->dp_work, radeon_dp_work_func); radeon_irq_kms_init()
307 INIT_WORK(&rdev->audio_work, r600_audio_update_hdmi); radeon_irq_kms_init()
309 rdev->irq.installed = true; radeon_irq_kms_init()
310 r = drm_irq_install(rdev->ddev, rdev->ddev->pdev->irq); radeon_irq_kms_init()
312 rdev->irq.installed = false; radeon_irq_kms_init()
313 flush_delayed_work(&rdev->hotplug_work); radeon_irq_kms_init()
324 * @rdev: radeon device pointer
328 void radeon_irq_kms_fini(struct radeon_device *rdev) radeon_irq_kms_fini() argument
330 drm_vblank_cleanup(rdev->ddev); radeon_irq_kms_fini()
331 if (rdev->irq.installed) { radeon_irq_kms_fini()
332 drm_irq_uninstall(rdev->ddev); radeon_irq_kms_fini()
333 rdev->irq.installed = false; radeon_irq_kms_fini()
334 if (rdev->msi_enabled) radeon_irq_kms_fini()
335 pci_disable_msi(rdev->pdev); radeon_irq_kms_fini()
336 flush_delayed_work(&rdev->hotplug_work); radeon_irq_kms_fini()
343 * @rdev: radeon device pointer
350 void radeon_irq_kms_sw_irq_get(struct radeon_device *rdev, int ring) radeon_irq_kms_sw_irq_get() argument
354 if (!rdev->ddev->irq_enabled) radeon_irq_kms_sw_irq_get()
357 if (atomic_inc_return(&rdev->irq.ring_int[ring]) == 1) { radeon_irq_kms_sw_irq_get()
358 spin_lock_irqsave(&rdev->irq.lock, irqflags); radeon_irq_kms_sw_irq_get()
359 radeon_irq_set(rdev); radeon_irq_kms_sw_irq_get()
360 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); radeon_irq_kms_sw_irq_get()
367 * @rdev: radeon device pointer
374 bool radeon_irq_kms_sw_irq_get_delayed(struct radeon_device *rdev, int ring) radeon_irq_kms_sw_irq_get_delayed() argument
376 return atomic_inc_return(&rdev->irq.ring_int[ring]) == 1; radeon_irq_kms_sw_irq_get_delayed()
382 * @rdev: radeon device pointer
389 void radeon_irq_kms_sw_irq_put(struct radeon_device *rdev, int ring) radeon_irq_kms_sw_irq_put() argument
393 if (!rdev->ddev->irq_enabled) radeon_irq_kms_sw_irq_put()
396 if (atomic_dec_and_test(&rdev->irq.ring_int[ring])) { radeon_irq_kms_sw_irq_put()
397 spin_lock_irqsave(&rdev->irq.lock, irqflags); radeon_irq_kms_sw_irq_put()
398 radeon_irq_set(rdev); radeon_irq_kms_sw_irq_put()
399 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); radeon_irq_kms_sw_irq_put()
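The sw_irq_get/put pair above refcounts interrupt users per ring: only the 0->1 and 1->0 transitions take the lock and reprogram the hardware mask, so nested acquirers cost a single atomic operation. A standalone C11 demo of the same transition test (puts() stands in for the locked radeon_irq_set() call):

#include <stdatomic.h>
#include <stdio.h>

static atomic_int ring_int;

static void sw_irq_get(void)
{
	/* atomic_fetch_add returns the old value; old 0 means we
	 * are the first user and must enable the interrupt. */
	if (atomic_fetch_add(&ring_int, 1) == 0)
		puts("enable ring interrupt");
}

static void sw_irq_put(void)
{
	/* old 1 means we were the last user: disable again */
	if (atomic_fetch_sub(&ring_int, 1) == 1)
		puts("disable ring interrupt");
}

int main(void)
{
	sw_irq_get();  /* enables  */
	sw_irq_get();  /* no-op    */
	sw_irq_put();  /* no-op    */
	sw_irq_put();  /* disables */
	return 0;
}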
406 * @rdev: radeon device pointer
412 void radeon_irq_kms_pflip_irq_get(struct radeon_device *rdev, int crtc) radeon_irq_kms_pflip_irq_get() argument
416 if (crtc < 0 || crtc >= rdev->num_crtc) radeon_irq_kms_pflip_irq_get()
419 if (!rdev->ddev->irq_enabled) radeon_irq_kms_pflip_irq_get()
422 if (atomic_inc_return(&rdev->irq.pflip[crtc]) == 1) { radeon_irq_kms_pflip_irq_get()
423 spin_lock_irqsave(&rdev->irq.lock, irqflags); radeon_irq_kms_pflip_irq_get()
424 radeon_irq_set(rdev); radeon_irq_kms_pflip_irq_get()
425 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); radeon_irq_kms_pflip_irq_get()
432 * @rdev: radeon device pointer
438 void radeon_irq_kms_pflip_irq_put(struct radeon_device *rdev, int crtc) radeon_irq_kms_pflip_irq_put() argument
442 if (crtc < 0 || crtc >= rdev->num_crtc) radeon_irq_kms_pflip_irq_put()
445 if (!rdev->ddev->irq_enabled) radeon_irq_kms_pflip_irq_put()
448 if (atomic_dec_and_test(&rdev->irq.pflip[crtc])) { radeon_irq_kms_pflip_irq_put()
449 spin_lock_irqsave(&rdev->irq.lock, irqflags); radeon_irq_kms_pflip_irq_put()
450 radeon_irq_set(rdev); radeon_irq_kms_pflip_irq_put()
451 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); radeon_irq_kms_pflip_irq_put()
458 * @rdev: radeon device pointer
463 void radeon_irq_kms_enable_afmt(struct radeon_device *rdev, int block) radeon_irq_kms_enable_afmt() argument
467 if (!rdev->ddev->irq_enabled) radeon_irq_kms_enable_afmt()
470 spin_lock_irqsave(&rdev->irq.lock, irqflags); radeon_irq_kms_enable_afmt()
471 rdev->irq.afmt[block] = true; radeon_irq_kms_enable_afmt()
472 radeon_irq_set(rdev); radeon_irq_kms_enable_afmt()
473 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); radeon_irq_kms_enable_afmt()
480 * @rdev: radeon device pointer
485 void radeon_irq_kms_disable_afmt(struct radeon_device *rdev, int block) radeon_irq_kms_disable_afmt() argument
489 if (!rdev->ddev->irq_enabled) radeon_irq_kms_disable_afmt()
492 spin_lock_irqsave(&rdev->irq.lock, irqflags); radeon_irq_kms_disable_afmt()
493 rdev->irq.afmt[block] = false; radeon_irq_kms_disable_afmt()
494 radeon_irq_set(rdev); radeon_irq_kms_disable_afmt()
495 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); radeon_irq_kms_disable_afmt()
501 * @rdev: radeon device pointer
506 void radeon_irq_kms_enable_hpd(struct radeon_device *rdev, unsigned hpd_mask) radeon_irq_kms_enable_hpd() argument
511 if (!rdev->ddev->irq_enabled) radeon_irq_kms_enable_hpd()
514 spin_lock_irqsave(&rdev->irq.lock, irqflags); radeon_irq_kms_enable_hpd()
516 rdev->irq.hpd[i] |= !!(hpd_mask & (1 << i)); radeon_irq_kms_enable_hpd()
517 radeon_irq_set(rdev); radeon_irq_kms_enable_hpd()
518 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); radeon_irq_kms_enable_hpd()
524 * @rdev: radeon device pointer
529 void radeon_irq_kms_disable_hpd(struct radeon_device *rdev, unsigned hpd_mask) radeon_irq_kms_disable_hpd() argument
534 if (!rdev->ddev->irq_enabled) radeon_irq_kms_disable_hpd()
537 spin_lock_irqsave(&rdev->irq.lock, irqflags); radeon_irq_kms_disable_hpd()
539 rdev->irq.hpd[i] &= !(hpd_mask & (1 << i)); radeon_irq_kms_disable_hpd()
540 radeon_irq_set(rdev); radeon_irq_kms_disable_hpd()
541 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); radeon_irq_kms_disable_hpd()
evergreen.c 41 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg) eg_cg_rreg() argument
46 spin_lock_irqsave(&rdev->cg_idx_lock, flags); eg_cg_rreg()
49 spin_unlock_irqrestore(&rdev->cg_idx_lock, flags); eg_cg_rreg()
53 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v) eg_cg_wreg() argument
57 spin_lock_irqsave(&rdev->cg_idx_lock, flags); eg_cg_wreg()
60 spin_unlock_irqrestore(&rdev->cg_idx_lock, flags); eg_cg_wreg()
63 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg) eg_pif_phy0_rreg() argument
68 spin_lock_irqsave(&rdev->pif_idx_lock, flags); eg_pif_phy0_rreg()
71 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags); eg_pif_phy0_rreg()
75 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v) eg_pif_phy0_wreg() argument
79 spin_lock_irqsave(&rdev->pif_idx_lock, flags); eg_pif_phy0_wreg()
82 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags); eg_pif_phy0_wreg()
85 u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg) eg_pif_phy1_rreg() argument
90 spin_lock_irqsave(&rdev->pif_idx_lock, flags); eg_pif_phy1_rreg()
93 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags); eg_pif_phy1_rreg()
97 void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v) eg_pif_phy1_wreg() argument
101 spin_lock_irqsave(&rdev->pif_idx_lock, flags); eg_pif_phy1_wreg()
104 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags); eg_pif_phy1_wreg()
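All four wrappers above follow one idiom: the hardware exposes an index/data register pair, and the write to the index register plus the access through the data register must execute as an uninterrupted sequence. spin_lock_irqsave() excludes both other CPUs and local interrupt handlers that might retarget the index in between. A hedged kernel-style sketch of the idiom (INDEX_REG and DATA_REG are placeholders, not real evergreen register names):

static u32 indexed_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 v;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(INDEX_REG, reg);   /* select the register window */
	v = RREG32(DATA_REG);     /* read through the window    */
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
	return v;
}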
204 static void evergreen_gpu_init(struct radeon_device *rdev);
205 void evergreen_fini(struct radeon_device *rdev);
206 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
207 void evergreen_program_aspm(struct radeon_device *rdev);
208 extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
210 extern void cayman_vm_decode_fault(struct radeon_device *rdev,
212 void cik_init_cp_pg_table(struct radeon_device *rdev);
214 extern u32 si_get_csb_size(struct radeon_device *rdev);
215 extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
216 extern u32 cik_get_csb_size(struct radeon_device *rdev);
217 extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
218 extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
992 static void evergreen_init_golden_registers(struct radeon_device *rdev) evergreen_init_golden_registers() argument
994 switch (rdev->family) { evergreen_init_golden_registers()
997 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1000 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1003 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1008 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1011 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1014 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1019 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1022 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1025 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1030 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1033 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1036 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1041 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1046 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1051 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1054 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1059 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1064 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1069 radeon_program_register_sequence(rdev, evergreen_init_golden_registers()
1081 * @rdev: radeon_device pointer
1088 int evergreen_get_allowed_info_register(struct radeon_device *rdev, evergreen_get_allowed_info_register() argument
1137 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock, sumo_set_uvd_clock() argument
1143 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, sumo_set_uvd_clock()
1161 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk) sumo_set_uvd_clocks() argument
1166 r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS); sumo_set_uvd_clocks()
1172 r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS); sumo_set_uvd_clocks()
1184 int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk) evergreen_set_uvd_clocks() argument
1204 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000, evergreen_set_uvd_clocks()
1222 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); evergreen_set_uvd_clocks()
1259 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); evergreen_set_uvd_clocks()
1273 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev) evergreen_fix_pci_max_read_req_size() argument
1278 readrq = pcie_get_readrq(rdev->pdev); evergreen_fix_pci_max_read_req_size()
1284 pcie_set_readrq(rdev->pdev, 512); evergreen_fix_pci_max_read_req_size()
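evergreen_fix_pci_max_read_req_size() above reads the configured PCIe maximum read request size and forces it to 512 bytes when the firmware left an unsupported value. A hedged sketch of the shape of that fix; pcie_get_readrq()/pcie_set_readrq() are the real kernel helpers, but the exact validity test is elided in the excerpt, so a hypothetical predicate stands in for it:

	int readrq = pcie_get_readrq(rdev->pdev);

	/* readrq_is_unsupported() is a placeholder for the check
	 * the excerpt elides; only the clamp-to-512 is shown. */
	if (readrq_is_unsupported(readrq))
		pcie_set_readrq(rdev->pdev, 512);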
1290 struct radeon_device *rdev = dev->dev_private; dce4_program_fmt() local
1343 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc) dce4_is_in_vblank() argument
1351 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc) dce4_is_counter_moving() argument
1367 * @rdev: radeon_device pointer
1372 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc) dce4_wait_for_vblank() argument
1376 if (crtc >= rdev->num_crtc) dce4_wait_for_vblank()
1385 while (dce4_is_in_vblank(rdev, crtc)) { dce4_wait_for_vblank()
1387 if (!dce4_is_counter_moving(rdev, crtc)) dce4_wait_for_vblank()
1392 while (!dce4_is_in_vblank(rdev, crtc)) { dce4_wait_for_vblank()
1394 if (!dce4_is_counter_moving(rdev, crtc)) dce4_wait_for_vblank()
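dce4_wait_for_vblank() above is a two-phase wait: first spin until any vblank already in progress ends, then spin until the next one begins, so the caller is guaranteed a whole vblank interval. Periodically it checks that the scanout position counter is still moving and bails out if the CRTC has stalled or been turned off. A hedged sketch of that shape (in_vblank() and counter_moving() stand in for dce4_is_in_vblank()/dce4_is_counter_moving(); the poll interval is assumed):

	unsigned i = 0;

	while (in_vblank(crtc))                 /* leave current vblank */
		if (++i % 100 == 0 && !counter_moving(crtc))
			break;
	while (!in_vblank(crtc))                /* wait for the next    */
		if (++i % 100 == 0 && !counter_moving(crtc))
			break;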
1403 * @rdev: radeon_device pointer
1410 void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base) evergreen_page_flip() argument
1412 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id]; evergreen_page_flip()
1426 * @rdev: radeon_device pointer
1431 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id) evergreen_page_flip_pending() argument
1433 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id]; evergreen_page_flip_pending()
1441 int evergreen_get_temp(struct radeon_device *rdev) evergreen_get_temp() argument
1446 if (rdev->family == CHIP_JUNIPER) { evergreen_get_temp()
1479 int sumo_get_temp(struct radeon_device *rdev) sumo_get_temp() argument
1490 * @rdev: radeon_device pointer
1496 void sumo_pm_init_profile(struct radeon_device *rdev) sumo_pm_init_profile() argument
1501 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; sumo_pm_init_profile()
1502 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; sumo_pm_init_profile()
1503 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0; sumo_pm_init_profile()
1504 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0; sumo_pm_init_profile()
1507 if (rdev->flags & RADEON_IS_MOBILITY) sumo_pm_init_profile()
1508 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0); sumo_pm_init_profile()
1510 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0); sumo_pm_init_profile()
1512 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx; sumo_pm_init_profile()
1513 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx; sumo_pm_init_profile()
1514 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; sumo_pm_init_profile()
1515 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; sumo_pm_init_profile()
1517 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx; sumo_pm_init_profile()
1518 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx; sumo_pm_init_profile()
1519 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; sumo_pm_init_profile()
1520 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; sumo_pm_init_profile()
1522 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx; sumo_pm_init_profile()
1523 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx; sumo_pm_init_profile()
1524 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0; sumo_pm_init_profile()
1525 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0; sumo_pm_init_profile()
1527 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx; sumo_pm_init_profile()
1528 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx; sumo_pm_init_profile()
1529 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0; sumo_pm_init_profile()
1530 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0; sumo_pm_init_profile()
1533 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0); sumo_pm_init_profile()
1534 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx; sumo_pm_init_profile()
1535 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx; sumo_pm_init_profile()
1536 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0; sumo_pm_init_profile()
1537 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = sumo_pm_init_profile()
1538 rdev->pm.power_state[idx].num_clock_modes - 1; sumo_pm_init_profile()
1540 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx; sumo_pm_init_profile()
1541 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx; sumo_pm_init_profile()
1542 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0; sumo_pm_init_profile()
1543 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = sumo_pm_init_profile()
1544 rdev->pm.power_state[idx].num_clock_modes - 1; sumo_pm_init_profile()
1550 * @rdev: radeon_device pointer
1556 void btc_pm_init_profile(struct radeon_device *rdev) btc_pm_init_profile() argument
1561 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; btc_pm_init_profile()
1562 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; btc_pm_init_profile()
1563 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0; btc_pm_init_profile()
1564 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2; btc_pm_init_profile()
1569 if (rdev->flags & RADEON_IS_MOBILITY) btc_pm_init_profile()
1570 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0); btc_pm_init_profile()
1572 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0); btc_pm_init_profile()
1574 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx; btc_pm_init_profile()
1575 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx; btc_pm_init_profile()
1576 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; btc_pm_init_profile()
1577 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; btc_pm_init_profile()
1579 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx; btc_pm_init_profile()
1580 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx; btc_pm_init_profile()
1581 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0; btc_pm_init_profile()
1582 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1; btc_pm_init_profile()
1584 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx; btc_pm_init_profile()
1585 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx; btc_pm_init_profile()
1586 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0; btc_pm_init_profile()
1587 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2; btc_pm_init_profile()
1589 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx; btc_pm_init_profile()
1590 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx; btc_pm_init_profile()
1591 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; btc_pm_init_profile()
1592 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; btc_pm_init_profile()
1594 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx; btc_pm_init_profile()
1595 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx; btc_pm_init_profile()
1596 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0; btc_pm_init_profile()
1597 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1; btc_pm_init_profile()
1599 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx; btc_pm_init_profile()
1600 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx; btc_pm_init_profile()
1601 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0; btc_pm_init_profile()
1602 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2; btc_pm_init_profile()
1608 * @rdev: radeon_device pointer
1613 void evergreen_pm_misc(struct radeon_device *rdev) evergreen_pm_misc() argument
1615 int req_ps_idx = rdev->pm.requested_power_state_index; evergreen_pm_misc()
1616 int req_cm_idx = rdev->pm.requested_clock_mode_index; evergreen_pm_misc()
1617 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx]; evergreen_pm_misc()
1624 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) { evergreen_pm_misc()
1625 radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC); evergreen_pm_misc()
1626 rdev->pm.current_vddc = voltage->voltage; evergreen_pm_misc()
1634 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) && evergreen_pm_misc()
1635 (rdev->family >= CHIP_BARTS) && evergreen_pm_misc()
1636 rdev->pm.active_crtc_count && evergreen_pm_misc()
1637 ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) || evergreen_pm_misc()
1638 (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX))) evergreen_pm_misc()
1639 voltage = &rdev->pm.power_state[req_ps_idx]. evergreen_pm_misc()
1640 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage; evergreen_pm_misc()
1645 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) { evergreen_pm_misc()
1646 radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI); evergreen_pm_misc()
1647 rdev->pm.current_vddci = voltage->vddci; evergreen_pm_misc()
1656 * @rdev: radeon_device pointer
1660 void evergreen_pm_prepare(struct radeon_device *rdev) evergreen_pm_prepare() argument
1662 struct drm_device *ddev = rdev->ddev; evergreen_pm_prepare()
1681 * @rdev: radeon_device pointer
1685 void evergreen_pm_finish(struct radeon_device *rdev) evergreen_pm_finish() argument
1687 struct drm_device *ddev = rdev->ddev; evergreen_pm_finish()
1706 * @rdev: radeon_device pointer
1712 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd) evergreen_hpd_sense() argument
1751 * @rdev: radeon_device pointer
1756 void evergreen_hpd_set_polarity(struct radeon_device *rdev, evergreen_hpd_set_polarity() argument
1760 bool connected = evergreen_hpd_sense(rdev, hpd); evergreen_hpd_set_polarity()
1819 * @rdev: radeon_device pointer
1824 void evergreen_hpd_init(struct radeon_device *rdev) evergreen_hpd_init() argument
1826 struct drm_device *dev = rdev->ddev; evergreen_hpd_init()
1866 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd); evergreen_hpd_init()
1869 radeon_irq_kms_enable_hpd(rdev, enabled); evergreen_hpd_init()
1875 * @rdev: radeon_device pointer
1880 void evergreen_hpd_fini(struct radeon_device *rdev) evergreen_hpd_fini() argument
1882 struct drm_device *dev = rdev->ddev; evergreen_hpd_fini()
1912 radeon_irq_kms_disable_hpd(rdev, disabled); evergreen_hpd_fini()
1917 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev, evergreen_line_buffer_adjust() argument
1963 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) { evergreen_line_buffer_adjust()
1966 for (i = 0; i < rdev->usec_timeout; i++) { evergreen_line_buffer_adjust()
1979 if (ASIC_IS_DCE5(rdev)) evergreen_line_buffer_adjust()
1985 if (ASIC_IS_DCE5(rdev)) evergreen_line_buffer_adjust()
1991 if (ASIC_IS_DCE5(rdev)) evergreen_line_buffer_adjust()
1997 if (ASIC_IS_DCE5(rdev)) evergreen_line_buffer_adjust()
2008 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev) evergreen_get_number_of_dram_channels() argument
2252 static void evergreen_program_watermarks(struct radeon_device *rdev, evergreen_program_watermarks() argument
2274 dram_channels = evergreen_get_number_of_dram_channels(rdev); evergreen_program_watermarks()
2277 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { evergreen_program_watermarks()
2279 radeon_dpm_get_mclk(rdev, false) * 10; evergreen_program_watermarks()
2281 radeon_dpm_get_sclk(rdev, false) * 10; evergreen_program_watermarks()
2283 wm_high.yclk = rdev->pm.current_mclk * 10; evergreen_program_watermarks()
2284 wm_high.sclk = rdev->pm.current_sclk * 10; evergreen_program_watermarks()
2304 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { evergreen_program_watermarks()
2306 radeon_dpm_get_mclk(rdev, true) * 10; evergreen_program_watermarks()
2308 radeon_dpm_get_sclk(rdev, true) * 10; evergreen_program_watermarks()
2310 wm_low.yclk = rdev->pm.current_mclk * 10; evergreen_program_watermarks()
2311 wm_low.sclk = rdev->pm.current_sclk * 10; evergreen_program_watermarks()
2340 (rdev->disp_priority == 2)) { evergreen_program_watermarks()
2347 (rdev->disp_priority == 2)) { evergreen_program_watermarks()
2413 * @rdev: radeon_device pointer
2418 void evergreen_bandwidth_update(struct radeon_device *rdev) evergreen_bandwidth_update() argument
2425 if (!rdev->mode_info.mode_config_initialized) evergreen_bandwidth_update()
2428 radeon_update_display_priority(rdev); evergreen_bandwidth_update()
2430 for (i = 0; i < rdev->num_crtc; i++) { evergreen_bandwidth_update()
2431 if (rdev->mode_info.crtcs[i]->base.enabled) evergreen_bandwidth_update()
2434 for (i = 0; i < rdev->num_crtc; i += 2) { evergreen_bandwidth_update()
2435 mode0 = &rdev->mode_info.crtcs[i]->base.mode; evergreen_bandwidth_update()
2436 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode; evergreen_bandwidth_update()
2437 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1); evergreen_bandwidth_update()
2438 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads); evergreen_bandwidth_update()
2439 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0); evergreen_bandwidth_update()
2440 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads); evergreen_bandwidth_update()
2447 * @rdev: radeon_device pointer
2453 int evergreen_mc_wait_for_idle(struct radeon_device *rdev) evergreen_mc_wait_for_idle() argument
2458 for (i = 0; i < rdev->usec_timeout; i++) { evergreen_mc_wait_for_idle()
2471 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev) evergreen_pcie_gart_tlb_flush() argument
2479 for (i = 0; i < rdev->usec_timeout; i++) { evergreen_pcie_gart_tlb_flush()
2494 static int evergreen_pcie_gart_enable(struct radeon_device *rdev) evergreen_pcie_gart_enable() argument
2499 if (rdev->gart.robj == NULL) { evergreen_pcie_gart_enable()
2500 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n"); evergreen_pcie_gart_enable()
2503 r = radeon_gart_table_vram_pin(rdev); evergreen_pcie_gart_enable()
2517 if (rdev->flags & RADEON_IS_IGP) { evergreen_pcie_gart_enable()
2525 if ((rdev->family == CHIP_JUNIPER) || evergreen_pcie_gart_enable()
2526 (rdev->family == CHIP_CYPRESS) || evergreen_pcie_gart_enable()
2527 (rdev->family == CHIP_HEMLOCK) || evergreen_pcie_gart_enable()
2528 (rdev->family == CHIP_BARTS)) evergreen_pcie_gart_enable()
2535 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12); evergreen_pcie_gart_enable()
2536 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12); evergreen_pcie_gart_enable()
2537 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); evergreen_pcie_gart_enable()
2541 (u32)(rdev->dummy_page.addr >> 12)); evergreen_pcie_gart_enable()
2544 evergreen_pcie_gart_tlb_flush(rdev); evergreen_pcie_gart_enable()
2546 (unsigned)(rdev->mc.gtt_size >> 20), evergreen_pcie_gart_enable()
2547 (unsigned long long)rdev->gart.table_addr); evergreen_pcie_gart_enable()
2548 rdev->gart.ready = true; evergreen_pcie_gart_enable()
2552 static void evergreen_pcie_gart_disable(struct radeon_device *rdev) evergreen_pcie_gart_disable() argument
2574 radeon_gart_table_vram_unpin(rdev); evergreen_pcie_gart_disable()
2577 static void evergreen_pcie_gart_fini(struct radeon_device *rdev) evergreen_pcie_gart_fini() argument
2579 evergreen_pcie_gart_disable(rdev); evergreen_pcie_gart_fini()
2580 radeon_gart_table_vram_free(rdev); evergreen_pcie_gart_fini()
2581 radeon_gart_fini(rdev); evergreen_pcie_gart_fini()
2585 static void evergreen_agp_enable(struct radeon_device *rdev) evergreen_agp_enable() argument
2652 static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev, evergreen_is_dp_sst_stream_enabled() argument
2712 static void evergreen_blank_dp_output(struct radeon_device *rdev, evergreen_blank_dp_output() argument
2752 void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save) evergreen_mc_stop() argument
2758 if (!ASIC_IS_NODCE(rdev)) { evergreen_mc_stop()
2766 for (i = 0; i < rdev->num_crtc; i++) { evergreen_mc_stop()
2770 if (ASIC_IS_DCE6(rdev)) { evergreen_mc_stop()
2773 radeon_wait_for_vblank(rdev, i); evergreen_mc_stop()
2782 radeon_wait_for_vblank(rdev, i); evergreen_mc_stop()
2790 frame_count = radeon_get_vblank_counter(rdev, i); evergreen_mc_stop()
2791 for (j = 0; j < rdev->usec_timeout; j++) { evergreen_mc_stop()
2792 if (radeon_get_vblank_counter(rdev, i) != frame_count) evergreen_mc_stop()
2803 if (ASIC_IS_DCE5(rdev) && evergreen_mc_stop()
2804 evergreen_is_dp_sst_stream_enabled(rdev, i, &dig_fe)) evergreen_mc_stop()
2805 evergreen_blank_dp_output(rdev, dig_fe); evergreen_mc_stop()
2820 radeon_mc_wait_for_idle(rdev); evergreen_mc_stop()
2834 for (i = 0; i < rdev->num_crtc; i++) { evergreen_mc_stop()
2850 void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save) evergreen_mc_resume() argument
2856 for (i = 0; i < rdev->num_crtc; i++) { evergreen_mc_resume()
2858 upper_32_bits(rdev->mc.vram_start)); evergreen_mc_resume()
2860 upper_32_bits(rdev->mc.vram_start)); evergreen_mc_resume()
2862 (u32)rdev->mc.vram_start); evergreen_mc_resume()
2864 (u32)rdev->mc.vram_start); evergreen_mc_resume()
2867 if (!ASIC_IS_NODCE(rdev)) { evergreen_mc_resume()
2868 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start)); evergreen_mc_resume()
2869 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start); evergreen_mc_resume()
2873 for (i = 0; i < rdev->num_crtc; i++) { evergreen_mc_resume()
2891 for (j = 0; j < rdev->usec_timeout; j++) { evergreen_mc_resume()
2907 for (i = 0; i < rdev->num_crtc; i++) { evergreen_mc_resume()
2909 if (ASIC_IS_DCE6(rdev)) { evergreen_mc_resume()
2923 frame_count = radeon_get_vblank_counter(rdev, i); evergreen_mc_resume()
2924 for (j = 0; j < rdev->usec_timeout; j++) { evergreen_mc_resume()
2925 if (radeon_get_vblank_counter(rdev, i) != frame_count) evergreen_mc_resume()
2931 if (!ASIC_IS_NODCE(rdev)) { evergreen_mc_resume()
2939 void evergreen_mc_program(struct radeon_device *rdev) evergreen_mc_program() argument
2955 evergreen_mc_stop(rdev, &save); evergreen_mc_program()
2956 if (evergreen_mc_wait_for_idle(rdev)) { evergreen_mc_program()
2957 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); evergreen_mc_program()
2962 if (rdev->flags & RADEON_IS_AGP) { evergreen_mc_program()
2963 if (rdev->mc.vram_start < rdev->mc.gtt_start) { evergreen_mc_program()
2966 rdev->mc.vram_start >> 12); evergreen_mc_program()
2968 rdev->mc.gtt_end >> 12); evergreen_mc_program()
2972 rdev->mc.gtt_start >> 12); evergreen_mc_program()
2974 rdev->mc.vram_end >> 12); evergreen_mc_program()
2978 rdev->mc.vram_start >> 12); evergreen_mc_program()
2980 rdev->mc.vram_end >> 12); evergreen_mc_program()
2982 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12); evergreen_mc_program()
2984 if ((rdev->family == CHIP_PALM) || evergreen_mc_program()
2985 (rdev->family == CHIP_SUMO) || evergreen_mc_program()
2986 (rdev->family == CHIP_SUMO2)) { evergreen_mc_program()
2988 tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24; evergreen_mc_program()
2989 tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20; evergreen_mc_program()
2992 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16; evergreen_mc_program()
2993 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF); evergreen_mc_program()
2995 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8)); evergreen_mc_program()
2998 if (rdev->flags & RADEON_IS_AGP) { evergreen_mc_program()
2999 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16); evergreen_mc_program()
3000 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16); evergreen_mc_program()
3001 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22); evergreen_mc_program()
3007 if (evergreen_mc_wait_for_idle(rdev)) { evergreen_mc_program()
3008 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); evergreen_mc_program()
3010 evergreen_mc_resume(rdev, &save); evergreen_mc_program()
3013 rv515_vga_render_disable(rdev); evergreen_mc_program()
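[editor's note] evergreen_mc_program() brackets the aperture writes between evergreen_mc_stop() and evergreen_mc_resume(), waiting for the memory controller to go idle before touching it. A user-space sketch of that bracket, with invented helper names (a model of the flow, not the driver's API):

#include <stdbool.h>
#include <stdio.h>

struct mc_save { bool display_was_on; };

static void mc_stop(struct mc_save *save)
{
	save->display_was_on = true;    /* remember what to restore */
	puts("blanking CRTCs, halting MC requests");
}

static bool mc_wait_for_idle(void)
{
	return true;                    /* poll a status register in reality */
}

static void mc_program_apertures(void)
{
	puts("rewriting VRAM/GTT aperture registers");
}

static void mc_resume(const struct mc_save *save)
{
	if (save->display_was_on)
		puts("restoring CRTC state, re-enabling MC requests");
}

int main(void)
{
	struct mc_save save;

	mc_stop(&save);
	if (!mc_wait_for_idle())
		fprintf(stderr, "Wait for MC idle timed out!\n");
	mc_program_apertures();
	mc_resume(&save);
	return 0;
}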
3019 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) evergreen_ring_ib_execute() argument
3021 struct radeon_ring *ring = &rdev->ring[ib->ring]; evergreen_ring_ib_execute()
3034 } else if (rdev->wb.enabled) { evergreen_ring_ib_execute()
3054 static int evergreen_cp_load_microcode(struct radeon_device *rdev) evergreen_cp_load_microcode() argument
3059 if (!rdev->me_fw || !rdev->pfp_fw) evergreen_cp_load_microcode()
3062 r700_cp_stop(rdev); evergreen_cp_load_microcode()
3069 fw_data = (const __be32 *)rdev->pfp_fw->data; evergreen_cp_load_microcode()
3075 fw_data = (const __be32 *)rdev->me_fw->data; evergreen_cp_load_microcode()
3086 static int evergreen_cp_start(struct radeon_device *rdev) evergreen_cp_start() argument
3088 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; evergreen_cp_start()
3092 r = radeon_ring_lock(rdev, ring, 7); evergreen_cp_start()
3100 radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1); evergreen_cp_start()
3104 radeon_ring_unlock_commit(rdev, ring, false); evergreen_cp_start()
3109 r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19); evergreen_cp_start()
3147 radeon_ring_unlock_commit(rdev, ring, false); evergreen_cp_start()
3152 static int evergreen_cp_resume(struct radeon_device *rdev) evergreen_cp_resume() argument
3154 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; evergreen_cp_resume()
3192 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC)); evergreen_cp_resume()
3193 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF); evergreen_cp_resume()
3194 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF); evergreen_cp_resume()
3196 if (rdev->wb.enabled) evergreen_cp_resume()
3209 evergreen_cp_start(rdev); evergreen_cp_resume()
3211 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring); evergreen_cp_resume()
3222 static void evergreen_gpu_init(struct radeon_device *rdev) evergreen_gpu_init() argument
3243 switch (rdev->family) { evergreen_gpu_init()
3246 rdev->config.evergreen.num_ses = 2; evergreen_gpu_init()
3247 rdev->config.evergreen.max_pipes = 4; evergreen_gpu_init()
3248 rdev->config.evergreen.max_tile_pipes = 8; evergreen_gpu_init()
3249 rdev->config.evergreen.max_simds = 10; evergreen_gpu_init()
3250 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses; evergreen_gpu_init()
3251 rdev->config.evergreen.max_gprs = 256; evergreen_gpu_init()
3252 rdev->config.evergreen.max_threads = 248; evergreen_gpu_init()
3253 rdev->config.evergreen.max_gs_threads = 32; evergreen_gpu_init()
3254 rdev->config.evergreen.max_stack_entries = 512; evergreen_gpu_init()
3255 rdev->config.evergreen.sx_num_of_sets = 4; evergreen_gpu_init()
3256 rdev->config.evergreen.sx_max_export_size = 256; evergreen_gpu_init()
3257 rdev->config.evergreen.sx_max_export_pos_size = 64; evergreen_gpu_init()
3258 rdev->config.evergreen.sx_max_export_smx_size = 192; evergreen_gpu_init()
3259 rdev->config.evergreen.max_hw_contexts = 8; evergreen_gpu_init()
3260 rdev->config.evergreen.sq_num_cf_insts = 2; evergreen_gpu_init()
3262 rdev->config.evergreen.sc_prim_fifo_size = 0x100; evergreen_gpu_init()
3263 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; evergreen_gpu_init()
3264 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; evergreen_gpu_init()
3268 rdev->config.evergreen.num_ses = 1; evergreen_gpu_init()
3269 rdev->config.evergreen.max_pipes = 4; evergreen_gpu_init()
3270 rdev->config.evergreen.max_tile_pipes = 4; evergreen_gpu_init()
3271 rdev->config.evergreen.max_simds = 10; evergreen_gpu_init()
3272 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses; evergreen_gpu_init()
3273 rdev->config.evergreen.max_gprs = 256; evergreen_gpu_init()
3274 rdev->config.evergreen.max_threads = 248; evergreen_gpu_init()
3275 rdev->config.evergreen.max_gs_threads = 32; evergreen_gpu_init()
3276 rdev->config.evergreen.max_stack_entries = 512; evergreen_gpu_init()
3277 rdev->config.evergreen.sx_num_of_sets = 4; evergreen_gpu_init()
3278 rdev->config.evergreen.sx_max_export_size = 256; evergreen_gpu_init()
3279 rdev->config.evergreen.sx_max_export_pos_size = 64; evergreen_gpu_init()
3280 rdev->config.evergreen.sx_max_export_smx_size = 192; evergreen_gpu_init()
3281 rdev->config.evergreen.max_hw_contexts = 8; evergreen_gpu_init()
3282 rdev->config.evergreen.sq_num_cf_insts = 2; evergreen_gpu_init()
3284 rdev->config.evergreen.sc_prim_fifo_size = 0x100; evergreen_gpu_init()
3285 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; evergreen_gpu_init()
3286 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; evergreen_gpu_init()
3290 rdev->config.evergreen.num_ses = 1; evergreen_gpu_init()
3291 rdev->config.evergreen.max_pipes = 4; evergreen_gpu_init()
3292 rdev->config.evergreen.max_tile_pipes = 4; evergreen_gpu_init()
3293 rdev->config.evergreen.max_simds = 5; evergreen_gpu_init()
3294 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses; evergreen_gpu_init()
3295 rdev->config.evergreen.max_gprs = 256; evergreen_gpu_init()
3296 rdev->config.evergreen.max_threads = 248; evergreen_gpu_init()
3297 rdev->config.evergreen.max_gs_threads = 32; evergreen_gpu_init()
3298 rdev->config.evergreen.max_stack_entries = 256; evergreen_gpu_init()
3299 rdev->config.evergreen.sx_num_of_sets = 4; evergreen_gpu_init()
3300 rdev->config.evergreen.sx_max_export_size = 256; evergreen_gpu_init()
3301 rdev->config.evergreen.sx_max_export_pos_size = 64; evergreen_gpu_init()
3302 rdev->config.evergreen.sx_max_export_smx_size = 192; evergreen_gpu_init()
3303 rdev->config.evergreen.max_hw_contexts = 8; evergreen_gpu_init()
3304 rdev->config.evergreen.sq_num_cf_insts = 2; evergreen_gpu_init()
3306 rdev->config.evergreen.sc_prim_fifo_size = 0x100; evergreen_gpu_init()
3307 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; evergreen_gpu_init()
3308 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; evergreen_gpu_init()
3313 rdev->config.evergreen.num_ses = 1; evergreen_gpu_init()
3314 rdev->config.evergreen.max_pipes = 2; evergreen_gpu_init()
3315 rdev->config.evergreen.max_tile_pipes = 2; evergreen_gpu_init()
3316 rdev->config.evergreen.max_simds = 2; evergreen_gpu_init()
3317 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses; evergreen_gpu_init()
3318 rdev->config.evergreen.max_gprs = 256; evergreen_gpu_init()
3319 rdev->config.evergreen.max_threads = 192; evergreen_gpu_init()
3320 rdev->config.evergreen.max_gs_threads = 16; evergreen_gpu_init()
3321 rdev->config.evergreen.max_stack_entries = 256; evergreen_gpu_init()
3322 rdev->config.evergreen.sx_num_of_sets = 4; evergreen_gpu_init()
3323 rdev->config.evergreen.sx_max_export_size = 128; evergreen_gpu_init()
3324 rdev->config.evergreen.sx_max_export_pos_size = 32; evergreen_gpu_init()
3325 rdev->config.evergreen.sx_max_export_smx_size = 96; evergreen_gpu_init()
3326 rdev->config.evergreen.max_hw_contexts = 4; evergreen_gpu_init()
3327 rdev->config.evergreen.sq_num_cf_insts = 1; evergreen_gpu_init()
3329 rdev->config.evergreen.sc_prim_fifo_size = 0x40; evergreen_gpu_init()
3330 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; evergreen_gpu_init()
3331 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; evergreen_gpu_init()
3335 rdev->config.evergreen.num_ses = 1; evergreen_gpu_init()
3336 rdev->config.evergreen.max_pipes = 2; evergreen_gpu_init()
3337 rdev->config.evergreen.max_tile_pipes = 2; evergreen_gpu_init()
3338 rdev->config.evergreen.max_simds = 2; evergreen_gpu_init()
3339 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses; evergreen_gpu_init()
3340 rdev->config.evergreen.max_gprs = 256; evergreen_gpu_init()
3341 rdev->config.evergreen.max_threads = 192; evergreen_gpu_init()
3342 rdev->config.evergreen.max_gs_threads = 16; evergreen_gpu_init()
3343 rdev->config.evergreen.max_stack_entries = 256; evergreen_gpu_init()
3344 rdev->config.evergreen.sx_num_of_sets = 4; evergreen_gpu_init()
3345 rdev->config.evergreen.sx_max_export_size = 128; evergreen_gpu_init()
3346 rdev->config.evergreen.sx_max_export_pos_size = 32; evergreen_gpu_init()
3347 rdev->config.evergreen.sx_max_export_smx_size = 96; evergreen_gpu_init()
3348 rdev->config.evergreen.max_hw_contexts = 4; evergreen_gpu_init()
3349 rdev->config.evergreen.sq_num_cf_insts = 1; evergreen_gpu_init()
3351 rdev->config.evergreen.sc_prim_fifo_size = 0x40; evergreen_gpu_init()
3352 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; evergreen_gpu_init()
3353 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; evergreen_gpu_init()
3357 rdev->config.evergreen.num_ses = 1; evergreen_gpu_init()
3358 rdev->config.evergreen.max_pipes = 4; evergreen_gpu_init()
3359 rdev->config.evergreen.max_tile_pipes = 4; evergreen_gpu_init()
3360 if (rdev->pdev->device == 0x9648) evergreen_gpu_init()
3361 rdev->config.evergreen.max_simds = 3; evergreen_gpu_init()
3362 else if ((rdev->pdev->device == 0x9647) || evergreen_gpu_init()
3363 (rdev->pdev->device == 0x964a)) evergreen_gpu_init()
3364 rdev->config.evergreen.max_simds = 4; evergreen_gpu_init()
3366 rdev->config.evergreen.max_simds = 5; evergreen_gpu_init()
3367 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses; evergreen_gpu_init()
3368 rdev->config.evergreen.max_gprs = 256; evergreen_gpu_init()
3369 rdev->config.evergreen.max_threads = 248; evergreen_gpu_init()
3370 rdev->config.evergreen.max_gs_threads = 32; evergreen_gpu_init()
3371 rdev->config.evergreen.max_stack_entries = 256; evergreen_gpu_init()
3372 rdev->config.evergreen.sx_num_of_sets = 4; evergreen_gpu_init()
3373 rdev->config.evergreen.sx_max_export_size = 256; evergreen_gpu_init()
3374 rdev->config.evergreen.sx_max_export_pos_size = 64; evergreen_gpu_init()
3375 rdev->config.evergreen.sx_max_export_smx_size = 192; evergreen_gpu_init()
3376 rdev->config.evergreen.max_hw_contexts = 8; evergreen_gpu_init()
3377 rdev->config.evergreen.sq_num_cf_insts = 2; evergreen_gpu_init()
3379 rdev->config.evergreen.sc_prim_fifo_size = 0x40; evergreen_gpu_init()
3380 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; evergreen_gpu_init()
3381 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; evergreen_gpu_init()
3385 rdev->config.evergreen.num_ses = 1; evergreen_gpu_init()
3386 rdev->config.evergreen.max_pipes = 4; evergreen_gpu_init()
3387 rdev->config.evergreen.max_tile_pipes = 4; evergreen_gpu_init()
3388 rdev->config.evergreen.max_simds = 2; evergreen_gpu_init()
3389 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses; evergreen_gpu_init()
3390 rdev->config.evergreen.max_gprs = 256; evergreen_gpu_init()
3391 rdev->config.evergreen.max_threads = 248; evergreen_gpu_init()
3392 rdev->config.evergreen.max_gs_threads = 32; evergreen_gpu_init()
3393 rdev->config.evergreen.max_stack_entries = 512; evergreen_gpu_init()
3394 rdev->config.evergreen.sx_num_of_sets = 4; evergreen_gpu_init()
3395 rdev->config.evergreen.sx_max_export_size = 256; evergreen_gpu_init()
3396 rdev->config.evergreen.sx_max_export_pos_size = 64; evergreen_gpu_init()
3397 rdev->config.evergreen.sx_max_export_smx_size = 192; evergreen_gpu_init()
3398 rdev->config.evergreen.max_hw_contexts = 4; evergreen_gpu_init()
3399 rdev->config.evergreen.sq_num_cf_insts = 2; evergreen_gpu_init()
3401 rdev->config.evergreen.sc_prim_fifo_size = 0x40; evergreen_gpu_init()
3402 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; evergreen_gpu_init()
3403 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; evergreen_gpu_init()
3407 rdev->config.evergreen.num_ses = 2; evergreen_gpu_init()
3408 rdev->config.evergreen.max_pipes = 4; evergreen_gpu_init()
3409 rdev->config.evergreen.max_tile_pipes = 8; evergreen_gpu_init()
3410 rdev->config.evergreen.max_simds = 7; evergreen_gpu_init()
3411 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses; evergreen_gpu_init()
3412 rdev->config.evergreen.max_gprs = 256; evergreen_gpu_init()
3413 rdev->config.evergreen.max_threads = 248; evergreen_gpu_init()
3414 rdev->config.evergreen.max_gs_threads = 32; evergreen_gpu_init()
3415 rdev->config.evergreen.max_stack_entries = 512; evergreen_gpu_init()
3416 rdev->config.evergreen.sx_num_of_sets = 4; evergreen_gpu_init()
3417 rdev->config.evergreen.sx_max_export_size = 256; evergreen_gpu_init()
3418 rdev->config.evergreen.sx_max_export_pos_size = 64; evergreen_gpu_init()
3419 rdev->config.evergreen.sx_max_export_smx_size = 192; evergreen_gpu_init()
3420 rdev->config.evergreen.max_hw_contexts = 8; evergreen_gpu_init()
3421 rdev->config.evergreen.sq_num_cf_insts = 2; evergreen_gpu_init()
3423 rdev->config.evergreen.sc_prim_fifo_size = 0x100; evergreen_gpu_init()
3424 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; evergreen_gpu_init()
3425 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; evergreen_gpu_init()
3429 rdev->config.evergreen.num_ses = 1; evergreen_gpu_init()
3430 rdev->config.evergreen.max_pipes = 4; evergreen_gpu_init()
3431 rdev->config.evergreen.max_tile_pipes = 4; evergreen_gpu_init()
3432 rdev->config.evergreen.max_simds = 6; evergreen_gpu_init()
3433 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses; evergreen_gpu_init()
3434 rdev->config.evergreen.max_gprs = 256; evergreen_gpu_init()
3435 rdev->config.evergreen.max_threads = 248; evergreen_gpu_init()
3436 rdev->config.evergreen.max_gs_threads = 32; evergreen_gpu_init()
3437 rdev->config.evergreen.max_stack_entries = 256; evergreen_gpu_init()
3438 rdev->config.evergreen.sx_num_of_sets = 4; evergreen_gpu_init()
3439 rdev->config.evergreen.sx_max_export_size = 256; evergreen_gpu_init()
3440 rdev->config.evergreen.sx_max_export_pos_size = 64; evergreen_gpu_init()
3441 rdev->config.evergreen.sx_max_export_smx_size = 192; evergreen_gpu_init()
3442 rdev->config.evergreen.max_hw_contexts = 8; evergreen_gpu_init()
3443 rdev->config.evergreen.sq_num_cf_insts = 2; evergreen_gpu_init()
3445 rdev->config.evergreen.sc_prim_fifo_size = 0x100; evergreen_gpu_init()
3446 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; evergreen_gpu_init()
3447 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; evergreen_gpu_init()
3451 rdev->config.evergreen.num_ses = 1; evergreen_gpu_init()
3452 rdev->config.evergreen.max_pipes = 2; evergreen_gpu_init()
3453 rdev->config.evergreen.max_tile_pipes = 2; evergreen_gpu_init()
3454 rdev->config.evergreen.max_simds = 2; evergreen_gpu_init()
3455 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses; evergreen_gpu_init()
3456 rdev->config.evergreen.max_gprs = 256; evergreen_gpu_init()
3457 rdev->config.evergreen.max_threads = 192; evergreen_gpu_init()
3458 rdev->config.evergreen.max_gs_threads = 16; evergreen_gpu_init()
3459 rdev->config.evergreen.max_stack_entries = 256; evergreen_gpu_init()
3460 rdev->config.evergreen.sx_num_of_sets = 4; evergreen_gpu_init()
3461 rdev->config.evergreen.sx_max_export_size = 128; evergreen_gpu_init()
3462 rdev->config.evergreen.sx_max_export_pos_size = 32; evergreen_gpu_init()
3463 rdev->config.evergreen.sx_max_export_smx_size = 96; evergreen_gpu_init()
3464 rdev->config.evergreen.max_hw_contexts = 4; evergreen_gpu_init()
3465 rdev->config.evergreen.sq_num_cf_insts = 1; evergreen_gpu_init()
3467 rdev->config.evergreen.sc_prim_fifo_size = 0x40; evergreen_gpu_init()
3468 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; evergreen_gpu_init()
3469 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; evergreen_gpu_init()
3487 evergreen_fix_pci_max_read_req_size(rdev); evergreen_gpu_init()
3490 if ((rdev->family == CHIP_PALM) || evergreen_gpu_init()
3491 (rdev->family == CHIP_SUMO) || evergreen_gpu_init()
3492 (rdev->family == CHIP_SUMO2)) evergreen_gpu_init()
3504 rdev->config.evergreen.tile_config = 0; evergreen_gpu_init()
3505 switch (rdev->config.evergreen.max_tile_pipes) { evergreen_gpu_init()
3508 rdev->config.evergreen.tile_config |= (0 << 0); evergreen_gpu_init()
3511 rdev->config.evergreen.tile_config |= (1 << 0); evergreen_gpu_init()
3514 rdev->config.evergreen.tile_config |= (2 << 0); evergreen_gpu_init()
3517 rdev->config.evergreen.tile_config |= (3 << 0); evergreen_gpu_init()
3521 if (rdev->flags & RADEON_IS_IGP) evergreen_gpu_init()
3522 rdev->config.evergreen.tile_config |= 1 << 4; evergreen_gpu_init()
3526 rdev->config.evergreen.tile_config |= 0 << 4; evergreen_gpu_init()
3529 rdev->config.evergreen.tile_config |= 1 << 4; evergreen_gpu_init()
3533 rdev->config.evergreen.tile_config |= 2 << 4; evergreen_gpu_init()
3537 rdev->config.evergreen.tile_config |= 0 << 8; evergreen_gpu_init()
3538 rdev->config.evergreen.tile_config |= evergreen_gpu_init()
3541 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) { evergreen_gpu_init()
3551 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) { evergreen_gpu_init()
3564 for (i = 0; i < rdev->config.evergreen.max_backends; i++) evergreen_gpu_init()
3568 for (i = 0; i < rdev->config.evergreen.max_backends; i++) evergreen_gpu_init()
3572 for (i = 0; i < rdev->config.evergreen.num_ses; i++) { evergreen_gpu_init()
3578 simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds; evergreen_gpu_init()
3582 rdev->config.evergreen.active_simds = hweight32(~tmp); evergreen_gpu_init()
3595 if ((rdev->config.evergreen.max_backends == 1) && evergreen_gpu_init()
3596 (rdev->flags & RADEON_IS_IGP)) { evergreen_gpu_init()
3606 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends, evergreen_gpu_init()
3634 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets); evergreen_gpu_init()
3637 if (rdev->family <= CHIP_SUMO2) evergreen_gpu_init()
3640 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) | evergreen_gpu_init()
3641 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) | evergreen_gpu_init()
3642 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1))); evergreen_gpu_init()
3644 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) | evergreen_gpu_init()
3645 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) | evergreen_gpu_init()
3646 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size))); evergreen_gpu_init()
3653 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) | evergreen_gpu_init()
3670 switch (rdev->family) { evergreen_gpu_init()
3685 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 12 / 32); evergreen_gpu_init()
3686 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32); evergreen_gpu_init()
3688 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32); evergreen_gpu_init()
3689 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32); evergreen_gpu_init()
3690 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32); evergreen_gpu_init()
3691 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32); evergreen_gpu_init()
3693 switch (rdev->family) { evergreen_gpu_init()
3706 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8); evergreen_gpu_init()
3707 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8); evergreen_gpu_init()
3708 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8); evergreen_gpu_init()
3709 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8); evergreen_gpu_init()
3710 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8); evergreen_gpu_init()
3712 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6); evergreen_gpu_init()
3713 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6); evergreen_gpu_init()
3714 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6); evergreen_gpu_init()
3715 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6); evergreen_gpu_init()
3716 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6); evergreen_gpu_init()
3717 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6); evergreen_gpu_init()
3734 switch (rdev->family) { evergreen_gpu_init()
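[editor's note] The long per-family switch above is effectively a constant lookup table keyed on rdev->family. A hypothetical table-driven rendering of a few of those fields; the numeric values match two of the cases listed above, but the family labels are stand-ins since the real case labels fall outside this listing:

#include <stdio.h>

enum chip { CHIP_A, CHIP_B, CHIP_NFAMILIES };   /* stand-ins for real family enums */

struct evergreen_cfg {
	int num_ses, max_pipes, max_simds, max_hw_contexts;
};

static const struct evergreen_cfg cfg_table[CHIP_NFAMILIES] = {
	[CHIP_A] = { .num_ses = 2, .max_pipes = 4, .max_simds = 10, .max_hw_contexts = 8 },
	[CHIP_B] = { .num_ses = 1, .max_pipes = 2, .max_simds = 2,  .max_hw_contexts = 4 },
};

int main(void)
{
	const struct evergreen_cfg *c = &cfg_table[CHIP_B];

	printf("%d SE(s), %d pipes, %d SIMDs, %d HW contexts\n",
	       c->num_ses, c->max_pipes, c->max_simds, c->max_hw_contexts);
	return 0;
}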
3798 int evergreen_mc_init(struct radeon_device *rdev) evergreen_mc_init() argument
3804 rdev->mc.vram_is_ddr = true; evergreen_mc_init()
3805 if ((rdev->family == CHIP_PALM) || evergreen_mc_init()
3806 (rdev->family == CHIP_SUMO) || evergreen_mc_init()
3807 (rdev->family == CHIP_SUMO2)) evergreen_mc_init()
3834 rdev->mc.vram_width = numchan * chansize; evergreen_mc_init()
3836 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0); evergreen_mc_init()
3837 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0); evergreen_mc_init()
3839 if ((rdev->family == CHIP_PALM) || evergreen_mc_init()
3840 (rdev->family == CHIP_SUMO) || evergreen_mc_init()
3841 (rdev->family == CHIP_SUMO2)) { evergreen_mc_init()
3843 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE); evergreen_mc_init()
3844 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE); evergreen_mc_init()
3847 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL; evergreen_mc_init()
3848 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL; evergreen_mc_init()
3850 rdev->mc.visible_vram_size = rdev->mc.aper_size; evergreen_mc_init()
3851 r700_vram_gtt_location(rdev, &rdev->mc); evergreen_mc_init()
3852 radeon_update_bandwidth_info(rdev); evergreen_mc_init()
3857 void evergreen_print_gpu_status_regs(struct radeon_device *rdev) evergreen_print_gpu_status_regs() argument
3859 dev_info(rdev->dev, " GRBM_STATUS = 0x%08X\n", evergreen_print_gpu_status_regs()
3861 dev_info(rdev->dev, " GRBM_STATUS_SE0 = 0x%08X\n", evergreen_print_gpu_status_regs()
3863 dev_info(rdev->dev, " GRBM_STATUS_SE1 = 0x%08X\n", evergreen_print_gpu_status_regs()
3865 dev_info(rdev->dev, " SRBM_STATUS = 0x%08X\n", evergreen_print_gpu_status_regs()
3867 dev_info(rdev->dev, " SRBM_STATUS2 = 0x%08X\n", evergreen_print_gpu_status_regs()
3869 dev_info(rdev->dev, " R_008674_CP_STALLED_STAT1 = 0x%08X\n", evergreen_print_gpu_status_regs()
3871 dev_info(rdev->dev, " R_008678_CP_STALLED_STAT2 = 0x%08X\n", evergreen_print_gpu_status_regs()
3873 dev_info(rdev->dev, " R_00867C_CP_BUSY_STAT = 0x%08X\n", evergreen_print_gpu_status_regs()
3875 dev_info(rdev->dev, " R_008680_CP_STAT = 0x%08X\n", evergreen_print_gpu_status_regs()
3877 dev_info(rdev->dev, " R_00D034_DMA_STATUS_REG = 0x%08X\n", evergreen_print_gpu_status_regs()
3879 if (rdev->family >= CHIP_CAYMAN) { evergreen_print_gpu_status_regs()
3880 dev_info(rdev->dev, " R_00D834_DMA_STATUS_REG = 0x%08X\n", evergreen_print_gpu_status_regs()
3885 bool evergreen_is_display_hung(struct radeon_device *rdev) evergreen_is_display_hung() argument
3891 for (i = 0; i < rdev->num_crtc; i++) { evergreen_is_display_hung()
3899 for (i = 0; i < rdev->num_crtc; i++) { evergreen_is_display_hung()
3914 u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev) evergreen_gpu_check_soft_reset() argument
3966 if (evergreen_is_display_hung(rdev)) evergreen_gpu_check_soft_reset()
3983 static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask) evergreen_gpu_soft_reset() argument
3992 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask); evergreen_gpu_soft_reset()
3994 evergreen_print_gpu_status_regs(rdev); evergreen_gpu_soft_reset()
4008 evergreen_mc_stop(rdev, &save); evergreen_gpu_soft_reset()
4009 if (evergreen_mc_wait_for_idle(rdev)) { evergreen_gpu_soft_reset()
4010 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); evergreen_gpu_soft_reset()
4055 if (!(rdev->flags & RADEON_IS_IGP)) { evergreen_gpu_soft_reset()
4063 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp); evergreen_gpu_soft_reset()
4077 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); evergreen_gpu_soft_reset()
4091 evergreen_mc_resume(rdev, &save); evergreen_gpu_soft_reset()
4094 evergreen_print_gpu_status_regs(rdev); evergreen_gpu_soft_reset()
4097 void evergreen_gpu_pci_config_reset(struct radeon_device *rdev) evergreen_gpu_pci_config_reset() argument
4102 dev_info(rdev->dev, "GPU pci config reset\n"); evergreen_gpu_pci_config_reset()
4116 r600_rlc_stop(rdev); evergreen_gpu_pci_config_reset()
4121 rv770_set_clk_bypass_mode(rdev); evergreen_gpu_pci_config_reset()
4123 pci_clear_master(rdev->pdev); evergreen_gpu_pci_config_reset()
4125 evergreen_mc_stop(rdev, &save); evergreen_gpu_pci_config_reset()
4126 if (evergreen_mc_wait_for_idle(rdev)) { evergreen_gpu_pci_config_reset()
4127 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); evergreen_gpu_pci_config_reset()
4130 radeon_pci_config_reset(rdev); evergreen_gpu_pci_config_reset()
4132 for (i = 0; i < rdev->usec_timeout; i++) { evergreen_gpu_pci_config_reset()
4139 int evergreen_asic_reset(struct radeon_device *rdev) evergreen_asic_reset() argument
4143 reset_mask = evergreen_gpu_check_soft_reset(rdev); evergreen_asic_reset()
4146 r600_set_bios_scratch_engine_hung(rdev, true); evergreen_asic_reset()
4149 evergreen_gpu_soft_reset(rdev, reset_mask); evergreen_asic_reset()
4151 reset_mask = evergreen_gpu_check_soft_reset(rdev); evergreen_asic_reset()
4155 evergreen_gpu_pci_config_reset(rdev); evergreen_asic_reset()
4157 reset_mask = evergreen_gpu_check_soft_reset(rdev); evergreen_asic_reset()
4160 r600_set_bios_scratch_engine_hung(rdev, false); evergreen_asic_reset()
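[editor's note] evergreen_asic_reset() escalates: it attempts the targeted soft reset first, and falls back to the heavier PCI config reset only if the hang check still reports stuck blocks. Stub helpers below model that flow; none of the names are the driver's:

#include <stdint.h>
#include <stdio.h>

static uint32_t hang_mask = 0x1;        /* pretend one block is hung */

static uint32_t check_soft_reset(void)
{
	return hang_mask;
}

static void soft_reset(uint32_t mask)
{
	printf("GPU softreset: 0x%08X\n", mask);
	hang_mask = 0;                  /* assume the soft reset worked */
}

static void pci_config_reset(void)
{
	puts("GPU pci config reset");
}

int main(void)
{
	uint32_t mask = check_soft_reset();

	if (mask)
		soft_reset(mask);

	/* Re-check; only a persistent hang earns the heavier reset. */
	mask = check_soft_reset();
	if (mask)
		pci_config_reset();
	return 0;
}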
4168 * @rdev: radeon_device pointer
4174 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) evergreen_gfx_is_lockup() argument
4176 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev); evergreen_gfx_is_lockup()
4181 radeon_ring_lockup_update(rdev, ring); evergreen_gfx_is_lockup()
4184 return radeon_ring_test_lockup(rdev, ring); evergreen_gfx_is_lockup()
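[editor's note] evergreen_gfx_is_lockup() piggybacks on the soft-reset check: if none of the ring's engine bits are set, the ring is considered alive and its lockup tracker is refreshed; otherwise the decision is delegated to a timeout test. A compact model with invented bit names:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define RESET_GFX (1u << 0)     /* assumed engine bits for the GFX ring */
#define RESET_CP  (1u << 1)

static void ring_lockup_update(void)
{
	/* record "last seen alive" so the timeout test has a baseline */
}

static bool ring_test_lockup(void)
{
	return true;                /* compare against the recorded baseline */
}

static bool gfx_is_lockup(uint32_t reset_mask)
{
	if (!(reset_mask & (RESET_GFX | RESET_CP))) {
		ring_lockup_update();
		return false;
	}
	return ring_test_lockup();
}

int main(void)
{
	printf("idle ring locked up? %d\n", gfx_is_lockup(0));
	printf("hung ring locked up? %d\n", gfx_is_lockup(RESET_GFX));
	return 0;
}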
4193 void sumo_rlc_fini(struct radeon_device *rdev) sumo_rlc_fini() argument
4198 if (rdev->rlc.save_restore_obj) { sumo_rlc_fini()
4199 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false); sumo_rlc_fini()
4201 dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r); sumo_rlc_fini()
4202 radeon_bo_unpin(rdev->rlc.save_restore_obj); sumo_rlc_fini()
4203 radeon_bo_unreserve(rdev->rlc.save_restore_obj); sumo_rlc_fini()
4205 radeon_bo_unref(&rdev->rlc.save_restore_obj); sumo_rlc_fini()
4206 rdev->rlc.save_restore_obj = NULL; sumo_rlc_fini()
4210 if (rdev->rlc.clear_state_obj) { sumo_rlc_fini()
4211 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false); sumo_rlc_fini()
4213 dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r); sumo_rlc_fini()
4214 radeon_bo_unpin(rdev->rlc.clear_state_obj); sumo_rlc_fini()
4215 radeon_bo_unreserve(rdev->rlc.clear_state_obj); sumo_rlc_fini()
4217 radeon_bo_unref(&rdev->rlc.clear_state_obj); sumo_rlc_fini()
4218 rdev->rlc.clear_state_obj = NULL; sumo_rlc_fini()
4222 if (rdev->rlc.cp_table_obj) { sumo_rlc_fini()
4223 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false); sumo_rlc_fini()
4225 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r); sumo_rlc_fini()
4226 radeon_bo_unpin(rdev->rlc.cp_table_obj); sumo_rlc_fini()
4227 radeon_bo_unreserve(rdev->rlc.cp_table_obj); sumo_rlc_fini()
4229 radeon_bo_unref(&rdev->rlc.cp_table_obj); sumo_rlc_fini()
4230 rdev->rlc.cp_table_obj = NULL; sumo_rlc_fini()
4236 int sumo_rlc_init(struct radeon_device *rdev) sumo_rlc_init() argument
4246 src_ptr = rdev->rlc.reg_list; sumo_rlc_init()
4247 dws = rdev->rlc.reg_list_size; sumo_rlc_init()
4248 if (rdev->family >= CHIP_BONAIRE) { sumo_rlc_init()
4251 cs_data = rdev->rlc.cs_data; sumo_rlc_init()
4255 if (rdev->rlc.save_restore_obj == NULL) { sumo_rlc_init()
4256 r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true, sumo_rlc_init()
4258 NULL, &rdev->rlc.save_restore_obj); sumo_rlc_init()
4260 dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r); sumo_rlc_init()
4265 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false); sumo_rlc_init()
4267 sumo_rlc_fini(rdev); sumo_rlc_init()
4270 r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM, sumo_rlc_init()
4271 &rdev->rlc.save_restore_gpu_addr); sumo_rlc_init()
4273 radeon_bo_unreserve(rdev->rlc.save_restore_obj); sumo_rlc_init()
4274 dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r); sumo_rlc_init()
4275 sumo_rlc_fini(rdev); sumo_rlc_init()
4279 r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr); sumo_rlc_init()
4281 dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r); sumo_rlc_init()
4282 sumo_rlc_fini(rdev); sumo_rlc_init()
4286 dst_ptr = rdev->rlc.sr_ptr; sumo_rlc_init()
4287 if (rdev->family >= CHIP_TAHITI) { sumo_rlc_init()
4289 for (i = 0; i < rdev->rlc.reg_list_size; i++) sumo_rlc_init()
4309 radeon_bo_kunmap(rdev->rlc.save_restore_obj); sumo_rlc_init()
4310 radeon_bo_unreserve(rdev->rlc.save_restore_obj); sumo_rlc_init()
4315 if (rdev->family >= CHIP_BONAIRE) { sumo_rlc_init()
4316 rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev); sumo_rlc_init()
4317 } else if (rdev->family >= CHIP_TAHITI) { sumo_rlc_init()
4318 rdev->rlc.clear_state_size = si_get_csb_size(rdev); sumo_rlc_init()
4319 dws = rdev->rlc.clear_state_size + (256 / 4); sumo_rlc_init()
4331 rdev->rlc.clear_state_size = dws; sumo_rlc_init()
4334 if (rdev->rlc.clear_state_obj == NULL) { sumo_rlc_init()
4335 r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true, sumo_rlc_init()
4337 NULL, &rdev->rlc.clear_state_obj); sumo_rlc_init()
4339 dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r); sumo_rlc_init()
4340 sumo_rlc_fini(rdev); sumo_rlc_init()
4344 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false); sumo_rlc_init()
4346 sumo_rlc_fini(rdev); sumo_rlc_init()
4349 r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM, sumo_rlc_init()
4350 &rdev->rlc.clear_state_gpu_addr); sumo_rlc_init()
4352 radeon_bo_unreserve(rdev->rlc.clear_state_obj); sumo_rlc_init()
4353 dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r); sumo_rlc_init()
4354 sumo_rlc_fini(rdev); sumo_rlc_init()
4358 r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr); sumo_rlc_init()
4360 dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r); sumo_rlc_init()
4361 sumo_rlc_fini(rdev); sumo_rlc_init()
4365 dst_ptr = rdev->rlc.cs_ptr; sumo_rlc_init()
4366 if (rdev->family >= CHIP_BONAIRE) { sumo_rlc_init()
4367 cik_get_csb_buffer(rdev, dst_ptr); sumo_rlc_init()
4368 } else if (rdev->family >= CHIP_TAHITI) { sumo_rlc_init()
4369 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256; sumo_rlc_init()
4372 dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size); sumo_rlc_init()
4373 si_get_csb_buffer(rdev, &dst_ptr[(256/4)]); sumo_rlc_init()
4376 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4); sumo_rlc_init()
4405 radeon_bo_kunmap(rdev->rlc.clear_state_obj); sumo_rlc_init()
4406 radeon_bo_unreserve(rdev->rlc.clear_state_obj); sumo_rlc_init()
4409 if (rdev->rlc.cp_table_size) { sumo_rlc_init()
4410 if (rdev->rlc.cp_table_obj == NULL) { sumo_rlc_init()
4411 r = radeon_bo_create(rdev, rdev->rlc.cp_table_size, sumo_rlc_init()
4414 NULL, &rdev->rlc.cp_table_obj); sumo_rlc_init()
4416 dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r); sumo_rlc_init()
4417 sumo_rlc_fini(rdev); sumo_rlc_init()
4422 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false); sumo_rlc_init()
4424 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r); sumo_rlc_init()
4425 sumo_rlc_fini(rdev); sumo_rlc_init()
4428 r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM, sumo_rlc_init()
4429 &rdev->rlc.cp_table_gpu_addr); sumo_rlc_init()
4431 radeon_bo_unreserve(rdev->rlc.cp_table_obj); sumo_rlc_init()
4432 dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r); sumo_rlc_init()
4433 sumo_rlc_fini(rdev); sumo_rlc_init()
4436 r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr); sumo_rlc_init()
4438 dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r); sumo_rlc_init()
4439 sumo_rlc_fini(rdev); sumo_rlc_init()
4443 cik_init_cp_pg_table(rdev); sumo_rlc_init()
4445 radeon_bo_kunmap(rdev->rlc.cp_table_obj); sumo_rlc_init()
4446 radeon_bo_unreserve(rdev->rlc.cp_table_obj); sumo_rlc_init()
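[editor's note] sumo_rlc_init() repeats one ladder per buffer — create, reserve, pin, kmap — and on any failure funnels into the single teardown path, sumo_rlc_fini(), which is written to cope with partially constructed state. A user-space sketch of that idiom (the bo type and helpers are stand-ins):

#include <stdio.h>
#include <stdlib.h>

struct bo { void *map; };

static void rlc_fini(struct bo **b)
{
	/* Safe on partially constructed state: every step is conditional. */
	if (*b) {
		free((*b)->map);
		free(*b);
		*b = NULL;
	}
}

static int rlc_init(struct bo **b, size_t size)
{
	*b = calloc(1, sizeof(**b));
	if (!*b)
		return -1;

	(*b)->map = malloc(size);       /* stands in for create+pin+kmap */
	if (!(*b)->map) {
		rlc_fini(b);            /* one exit path for every failure */
		return -1;
	}
	return 0;
}

int main(void)
{
	struct bo *save_restore = NULL;

	if (rlc_init(&save_restore, 4096))
		return 1;
	puts("RLC save/restore buffer ready");
	rlc_fini(&save_restore);
	return 0;
}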
4453 static void evergreen_rlc_start(struct radeon_device *rdev) evergreen_rlc_start() argument
4457 if (rdev->flags & RADEON_IS_IGP) { evergreen_rlc_start()
4464 int evergreen_rlc_resume(struct radeon_device *rdev) evergreen_rlc_resume() argument
4469 if (!rdev->rlc_fw) evergreen_rlc_resume()
4472 r600_rlc_stop(rdev); evergreen_rlc_resume()
4476 if (rdev->flags & RADEON_IS_IGP) { evergreen_rlc_resume()
4477 if (rdev->family == CHIP_ARUBA) { evergreen_rlc_resume()
4479 3 | (3 << (16 * rdev->config.cayman.max_shader_engines)); evergreen_rlc_resume()
4482 tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se; evergreen_rlc_resume()
4484 if (tmp == rdev->config.cayman.max_simds_per_se) { evergreen_rlc_resume()
4495 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8); evergreen_rlc_resume()
4496 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8); evergreen_rlc_resume()
4507 fw_data = (const __be32 *)rdev->rlc_fw->data; evergreen_rlc_resume()
4508 if (rdev->family >= CHIP_ARUBA) { evergreen_rlc_resume()
4513 } else if (rdev->family >= CHIP_CAYMAN) { evergreen_rlc_resume()
4526 evergreen_rlc_start(rdev); evergreen_rlc_resume()
4533 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc) evergreen_get_vblank_counter() argument
4535 if (crtc >= rdev->num_crtc) evergreen_get_vblank_counter()
4541 void evergreen_disable_interrupt_state(struct radeon_device *rdev) evergreen_disable_interrupt_state() argument
4545 if (rdev->family >= CHIP_CAYMAN) { evergreen_disable_interrupt_state()
4546 cayman_cp_int_cntl_setup(rdev, 0, evergreen_disable_interrupt_state()
4548 cayman_cp_int_cntl_setup(rdev, 1, 0); evergreen_disable_interrupt_state()
4549 cayman_cp_int_cntl_setup(rdev, 2, 0); evergreen_disable_interrupt_state()
4560 if (rdev->num_crtc >= 4) { evergreen_disable_interrupt_state()
4564 if (rdev->num_crtc >= 6) { evergreen_disable_interrupt_state()
4571 if (rdev->num_crtc >= 4) { evergreen_disable_interrupt_state()
4575 if (rdev->num_crtc >= 6) { evergreen_disable_interrupt_state()
4581 if (!ASIC_IS_DCE5(rdev)) evergreen_disable_interrupt_state()
4600 int evergreen_irq_set(struct radeon_device *rdev) evergreen_irq_set() argument
4611 if (!rdev->irq.installed) { evergreen_irq_set()
4616 if (!rdev->ih.enabled) { evergreen_irq_set()
4617 r600_disable_interrupts(rdev); evergreen_irq_set()
4619 evergreen_disable_interrupt_state(rdev); evergreen_irq_set()
4629 if (rdev->family == CHIP_ARUBA) evergreen_irq_set()
4645 if (rdev->family >= CHIP_CAYMAN) { evergreen_irq_set()
4647 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) { evergreen_irq_set()
4651 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) { evergreen_irq_set()
4655 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) { evergreen_irq_set()
4660 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) { evergreen_irq_set()
4667 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) { evergreen_irq_set()
4672 if (rdev->family >= CHIP_CAYMAN) { evergreen_irq_set()
4674 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) { evergreen_irq_set()
4680 if (rdev->irq.dpm_thermal) { evergreen_irq_set()
4685 if (rdev->irq.crtc_vblank_int[0] || evergreen_irq_set()
4686 atomic_read(&rdev->irq.pflip[0])) { evergreen_irq_set()
4690 if (rdev->irq.crtc_vblank_int[1] || evergreen_irq_set()
4691 atomic_read(&rdev->irq.pflip[1])) { evergreen_irq_set()
4695 if (rdev->irq.crtc_vblank_int[2] || evergreen_irq_set()
4696 atomic_read(&rdev->irq.pflip[2])) { evergreen_irq_set()
4700 if (rdev->irq.crtc_vblank_int[3] || evergreen_irq_set()
4701 atomic_read(&rdev->irq.pflip[3])) { evergreen_irq_set()
4705 if (rdev->irq.crtc_vblank_int[4] || evergreen_irq_set()
4706 atomic_read(&rdev->irq.pflip[4])) { evergreen_irq_set()
4710 if (rdev->irq.crtc_vblank_int[5] || evergreen_irq_set()
4711 atomic_read(&rdev->irq.pflip[5])) { evergreen_irq_set()
4715 if (rdev->irq.hpd[0]) { evergreen_irq_set()
4719 if (rdev->irq.hpd[1]) { evergreen_irq_set()
4723 if (rdev->irq.hpd[2]) { evergreen_irq_set()
4727 if (rdev->irq.hpd[3]) { evergreen_irq_set()
4731 if (rdev->irq.hpd[4]) { evergreen_irq_set()
4735 if (rdev->irq.hpd[5]) { evergreen_irq_set()
4739 if (rdev->irq.afmt[0]) { evergreen_irq_set()
4743 if (rdev->irq.afmt[1]) { evergreen_irq_set()
4747 if (rdev->irq.afmt[2]) { evergreen_irq_set()
4751 if (rdev->irq.afmt[3]) { evergreen_irq_set()
4755 if (rdev->irq.afmt[4]) { evergreen_irq_set()
4759 if (rdev->irq.afmt[5]) { evergreen_irq_set()
4764 if (rdev->family >= CHIP_CAYMAN) { evergreen_irq_set()
4765 cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl); evergreen_irq_set()
4766 cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1); evergreen_irq_set()
4767 cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2); evergreen_irq_set()
4773 if (rdev->family >= CHIP_CAYMAN) evergreen_irq_set()
4780 if (rdev->num_crtc >= 4) { evergreen_irq_set()
4784 if (rdev->num_crtc >= 6) { evergreen_irq_set()
4793 if (rdev->num_crtc >= 4) { evergreen_irq_set()
4799 if (rdev->num_crtc >= 6) { evergreen_irq_set()
4812 if (rdev->family == CHIP_ARUBA) evergreen_irq_set()
4830 static void evergreen_irq_ack(struct radeon_device *rdev) evergreen_irq_ack() argument
4834 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS); evergreen_irq_ack()
4835 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE); evergreen_irq_ack()
4836 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2); evergreen_irq_ack()
4837 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3); evergreen_irq_ack()
4838 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4); evergreen_irq_ack()
4839 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5); evergreen_irq_ack()
4840 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET); evergreen_irq_ack()
4841 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET); evergreen_irq_ack()
4842 if (rdev->num_crtc >= 4) { evergreen_irq_ack()
4843 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET); evergreen_irq_ack()
4844 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET); evergreen_irq_ack()
4846 if (rdev->num_crtc >= 6) { evergreen_irq_ack()
4847 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET); evergreen_irq_ack()
4848 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET); evergreen_irq_ack()
4851 rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET); evergreen_irq_ack()
4852 rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET); evergreen_irq_ack()
4853 rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET); evergreen_irq_ack()
4854 rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET); evergreen_irq_ack()
4855 rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET); evergreen_irq_ack()
4856 rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET); evergreen_irq_ack()
4858 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED) evergreen_irq_ack()
4860 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED) evergreen_irq_ack()
4862 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) evergreen_irq_ack()
4864 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) evergreen_irq_ack()
4866 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) evergreen_irq_ack()
4868 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) evergreen_irq_ack()
4871 if (rdev->num_crtc >= 4) { evergreen_irq_ack()
4872 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED) evergreen_irq_ack()
4874 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED) evergreen_irq_ack()
4876 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) evergreen_irq_ack()
4878 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) evergreen_irq_ack()
4880 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) evergreen_irq_ack()
4882 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) evergreen_irq_ack()
4886 if (rdev->num_crtc >= 6) { evergreen_irq_ack()
4887 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED) evergreen_irq_ack()
4889 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED) evergreen_irq_ack()
4891 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) evergreen_irq_ack()
4893 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) evergreen_irq_ack()
4895 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) evergreen_irq_ack()
4897 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) evergreen_irq_ack()
4901 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) { evergreen_irq_ack()
4906 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) { evergreen_irq_ack()
4911 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) { evergreen_irq_ack()
4916 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) { evergreen_irq_ack()
4921 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) { evergreen_irq_ack()
4926 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) { evergreen_irq_ack()
4932 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) { evergreen_irq_ack()
4937 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) { evergreen_irq_ack()
4942 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) { evergreen_irq_ack()
4947 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) { evergreen_irq_ack()
4952 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) { evergreen_irq_ack()
4957 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) { evergreen_irq_ack()
4963 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) { evergreen_irq_ack()
4968 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) { evergreen_irq_ack()
4973 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) { evergreen_irq_ack()
4978 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) { evergreen_irq_ack()
4983 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) { evergreen_irq_ack()
4988 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) { evergreen_irq_ack()
4995 static void evergreen_irq_disable(struct radeon_device *rdev) evergreen_irq_disable() argument
4997 r600_disable_interrupts(rdev); evergreen_irq_disable()
5000 evergreen_irq_ack(rdev); evergreen_irq_disable()
5001 evergreen_disable_interrupt_state(rdev); evergreen_irq_disable()
5004 void evergreen_irq_suspend(struct radeon_device *rdev) evergreen_irq_suspend() argument
5006 evergreen_irq_disable(rdev); evergreen_irq_suspend()
5007 r600_rlc_stop(rdev); evergreen_irq_suspend()
5010 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev) evergreen_get_ih_wptr() argument
5014 if (rdev->wb.enabled) evergreen_get_ih_wptr()
5015 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]); evergreen_get_ih_wptr()
5025 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n", evergreen_get_ih_wptr()
5026 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask); evergreen_get_ih_wptr()
5027 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask; evergreen_get_ih_wptr()
5032 return (wptr & rdev->ih.ptr_mask); evergreen_get_ih_wptr()
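[editor's note] On overflow, evergreen_get_ih_wptr() logs the condition and advances the read pointer past the oldest 16-byte event before masking the write pointer into the power-of-two ring. A stand-alone model (the ring size and overflow bit are illustrative):

#include <stdint.h>
#include <stdio.h>

#define IH_RING_SIZE 64u                /* bytes; must be a power of two */
#define IH_PTR_MASK  (IH_RING_SIZE - 1)
#define IH_OVERFLOW  (1u << 31)         /* assumed overflow flag bit */

static uint32_t ih_get_wptr(uint32_t hw_wptr, uint32_t *rptr)
{
	if (hw_wptr & IH_OVERFLOW) {
		hw_wptr &= ~IH_OVERFLOW;
		fprintf(stderr, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
			hw_wptr, *rptr, (hw_wptr + 16) & IH_PTR_MASK);
		*rptr = (hw_wptr + 16) & IH_PTR_MASK;   /* drop the oldest event */
	}
	return hw_wptr & IH_PTR_MASK;
}

int main(void)
{
	uint32_t rptr = 0;
	uint32_t wptr = ih_get_wptr(IH_OVERFLOW | 72, &rptr);

	printf("wptr=%u rptr=%u\n", wptr, rptr);    /* prints wptr=8 rptr=24 */
	return 0;
}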
5035 int evergreen_irq_process(struct radeon_device *rdev) evergreen_irq_process() argument
5047 if (!rdev->ih.enabled || rdev->shutdown) evergreen_irq_process()
5050 wptr = evergreen_get_ih_wptr(rdev); evergreen_irq_process()
5054 if (atomic_xchg(&rdev->ih.lock, 1)) evergreen_irq_process()
5057 rptr = rdev->ih.rptr; evergreen_irq_process()
5064 evergreen_irq_ack(rdev); evergreen_irq_process()
5069 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff; evergreen_irq_process()
5070 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff; evergreen_irq_process()
5076 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)) evergreen_irq_process()
5079 if (rdev->irq.crtc_vblank_int[0]) { evergreen_irq_process()
5080 drm_handle_vblank(rdev->ddev, 0); evergreen_irq_process()
5081 rdev->pm.vblank_sync = true; evergreen_irq_process()
5082 wake_up(&rdev->irq.vblank_queue); evergreen_irq_process()
5084 if (atomic_read(&rdev->irq.pflip[0])) evergreen_irq_process()
5085 radeon_crtc_handle_vblank(rdev, 0); evergreen_irq_process()
5086 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT; evergreen_irq_process()
5091 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)) evergreen_irq_process()
5094 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT; evergreen_irq_process()
5106 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)) evergreen_irq_process()
5109 if (rdev->irq.crtc_vblank_int[1]) { evergreen_irq_process()
5110 drm_handle_vblank(rdev->ddev, 1); evergreen_irq_process()
5111 rdev->pm.vblank_sync = true; evergreen_irq_process()
5112 wake_up(&rdev->irq.vblank_queue); evergreen_irq_process()
5114 if (atomic_read(&rdev->irq.pflip[1])) evergreen_irq_process()
5115 radeon_crtc_handle_vblank(rdev, 1); evergreen_irq_process()
5116 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT; evergreen_irq_process()
5121 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)) evergreen_irq_process()
5124 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT; evergreen_irq_process()
5136 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)) evergreen_irq_process()
5139 if (rdev->irq.crtc_vblank_int[2]) { evergreen_irq_process()
5140 drm_handle_vblank(rdev->ddev, 2); evergreen_irq_process()
5141 rdev->pm.vblank_sync = true; evergreen_irq_process()
5142 wake_up(&rdev->irq.vblank_queue); evergreen_irq_process()
5144 if (atomic_read(&rdev->irq.pflip[2])) evergreen_irq_process()
5145 radeon_crtc_handle_vblank(rdev, 2); evergreen_irq_process()
5146 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT; evergreen_irq_process()
5151 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)) evergreen_irq_process()
5154 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT; evergreen_irq_process()
5166 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)) evergreen_irq_process()
5169 if (rdev->irq.crtc_vblank_int[3]) { evergreen_irq_process()
5170 drm_handle_vblank(rdev->ddev, 3); evergreen_irq_process()
5171 rdev->pm.vblank_sync = true; evergreen_irq_process()
5172 wake_up(&rdev->irq.vblank_queue); evergreen_irq_process()
5174 if (atomic_read(&rdev->irq.pflip[3])) evergreen_irq_process()
5175 radeon_crtc_handle_vblank(rdev, 3); evergreen_irq_process()
5176 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT; evergreen_irq_process()
5181 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)) evergreen_irq_process()
5184 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT; evergreen_irq_process()
5196 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)) evergreen_irq_process()
5199 if (rdev->irq.crtc_vblank_int[4]) { evergreen_irq_process()
5200 drm_handle_vblank(rdev->ddev, 4); evergreen_irq_process()
5201 rdev->pm.vblank_sync = true; evergreen_irq_process()
5202 wake_up(&rdev->irq.vblank_queue); evergreen_irq_process()
5204 if (atomic_read(&rdev->irq.pflip[4])) evergreen_irq_process()
5205 radeon_crtc_handle_vblank(rdev, 4); evergreen_irq_process()
5206 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT; evergreen_irq_process()
5211 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)) evergreen_irq_process()
5214 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT; evergreen_irq_process()
5226 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)) evergreen_irq_process()
5229 if (rdev->irq.crtc_vblank_int[5]) { evergreen_irq_process()
5230 drm_handle_vblank(rdev->ddev, 5); evergreen_irq_process()
5231 rdev->pm.vblank_sync = true; evergreen_irq_process()
5232 wake_up(&rdev->irq.vblank_queue); evergreen_irq_process()
5234 if (atomic_read(&rdev->irq.pflip[5])) evergreen_irq_process()
5235 radeon_crtc_handle_vblank(rdev, 5); evergreen_irq_process()
5236 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT; evergreen_irq_process()
5241 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)) evergreen_irq_process()
5244 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT; evergreen_irq_process()
5261 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1); evergreen_irq_process()
5266 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT)) evergreen_irq_process()
5269 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT; evergreen_irq_process()
5274 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT)) evergreen_irq_process()
5277 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT; evergreen_irq_process()
5282 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT)) evergreen_irq_process()
5285 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT; evergreen_irq_process()
5290 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT)) evergreen_irq_process()
5293 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT; evergreen_irq_process()
5298 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT)) evergreen_irq_process()
5301 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT; evergreen_irq_process()
5306 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT)) evergreen_irq_process()
5309 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT; evergreen_irq_process()
5314 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT)) evergreen_irq_process()
5317 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT; evergreen_irq_process()
5322 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT)) evergreen_irq_process()
5325 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT; evergreen_irq_process()
5330 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT)) evergreen_irq_process()
5333 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT; evergreen_irq_process()
5338 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT)) evergreen_irq_process()
5341 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT; evergreen_irq_process()
5346 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT)) evergreen_irq_process()
5349 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT; evergreen_irq_process()
5354 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT)) evergreen_irq_process()
5357 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT; evergreen_irq_process()
5369 if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG)) evergreen_irq_process()
5372 rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG; evergreen_irq_process()
5377 if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG)) evergreen_irq_process()
5380 rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG; evergreen_irq_process()
5385 if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG)) evergreen_irq_process()
5388 rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG; evergreen_irq_process()
5393 if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG)) evergreen_irq_process()
5396 rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG; evergreen_irq_process()
5401 if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG)) evergreen_irq_process()
5404 rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG; evergreen_irq_process()
5409 if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG)) evergreen_irq_process()
5412 rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG; evergreen_irq_process()
5426 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX); evergreen_irq_process()
5436 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data); evergreen_irq_process()
5437 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", evergreen_irq_process()
5439 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", evergreen_irq_process()
5441 cayman_vm_decode_fault(rdev, status, addr); evergreen_irq_process()
5447 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); evergreen_irq_process()
5451 if (rdev->family >= CHIP_CAYMAN) { evergreen_irq_process()
5454 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); evergreen_irq_process()
5457 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX); evergreen_irq_process()
5460 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX); evergreen_irq_process()
5464 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); evergreen_irq_process()
5468 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX); evergreen_irq_process()
5472 rdev->pm.dpm.thermal.high_to_low = false; evergreen_irq_process()
5477 rdev->pm.dpm.thermal.high_to_low = true; evergreen_irq_process()
5484 if (rdev->family >= CHIP_CAYMAN) { evergreen_irq_process()
5486 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX); evergreen_irq_process()
5496 rptr &= rdev->ih.ptr_mask; evergreen_irq_process()
5500 schedule_work(&rdev->dp_work); evergreen_irq_process()
5502 schedule_delayed_work(&rdev->hotplug_work, 0); evergreen_irq_process()
5504 schedule_work(&rdev->audio_work); evergreen_irq_process()
5505 if (queue_thermal && rdev->pm.dpm_enabled) evergreen_irq_process()
5506 schedule_work(&rdev->pm.dpm.thermal.work); evergreen_irq_process()
5507 rdev->ih.rptr = rptr; evergreen_irq_process()
5508 atomic_set(&rdev->ih.lock, 0); evergreen_irq_process()
5511 wptr = evergreen_get_ih_wptr(rdev); evergreen_irq_process()
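
The tail of evergreen_irq_process() above (source lines 5496-5511) is a classic interrupt-ring consumer: the read pointer wraps with a power-of-two mask, deferred work is scheduled, rptr is published, and wptr is re-read to catch entries that arrived meanwhile. A minimal standalone sketch of that loop, with the radeon types replaced by stand-ins and the ring's real 16-byte entry stride simplified to one slot:

#include <stdint.h>

struct ih_ring {
        volatile uint32_t wptr;   /* producer (hardware) write pointer */
        uint32_t rptr;            /* consumer read pointer */
        uint32_t ptr_mask;        /* ring_size - 1; size is a power of two */
};

static void ih_consume(struct ih_ring *ih, void (*handle)(uint32_t idx))
{
        uint32_t wptr = ih->wptr;

        while (ih->rptr != wptr) {
                handle(ih->rptr);
                /* cheap modulo: wrap with the power-of-two mask, as in
                 * "rptr &= rdev->ih.ptr_mask" above */
                ih->rptr = (ih->rptr + 1) & ih->ptr_mask;
                /* re-read wptr so late-arriving entries are not missed */
                wptr = ih->wptr;
        }
}
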
5518 static int evergreen_startup(struct radeon_device *rdev) evergreen_startup() argument
5524 evergreen_pcie_gen2_enable(rdev); evergreen_startup()
5526 evergreen_program_aspm(rdev); evergreen_startup()
5529 r = r600_vram_scratch_init(rdev); evergreen_startup()
5533 evergreen_mc_program(rdev); evergreen_startup()
5535 if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) { evergreen_startup()
5536 r = ni_mc_load_microcode(rdev); evergreen_startup()
5543 if (rdev->flags & RADEON_IS_AGP) { evergreen_startup()
5544 evergreen_agp_enable(rdev); evergreen_startup()
5546 r = evergreen_pcie_gart_enable(rdev); evergreen_startup()
5550 evergreen_gpu_init(rdev); evergreen_startup()
5553 if (rdev->flags & RADEON_IS_IGP) { evergreen_startup()
5554 rdev->rlc.reg_list = sumo_rlc_save_restore_register_list; evergreen_startup()
5555 rdev->rlc.reg_list_size = evergreen_startup()
5557 rdev->rlc.cs_data = evergreen_cs_data; evergreen_startup()
5558 r = sumo_rlc_init(rdev); evergreen_startup()
5566 r = radeon_wb_init(rdev); evergreen_startup()
5570 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); evergreen_startup()
5572 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); evergreen_startup()
5576 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX); evergreen_startup()
5578 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); evergreen_startup()
5582 r = uvd_v2_2_resume(rdev); evergreen_startup()
5584 r = radeon_fence_driver_start_ring(rdev, evergreen_startup()
5587 dev_err(rdev->dev, "UVD fences init error (%d).\n", r); evergreen_startup()
5591 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; evergreen_startup()
5594 if (!rdev->irq.installed) { evergreen_startup()
5595 r = radeon_irq_kms_init(rdev); evergreen_startup()
5600 r = r600_irq_init(rdev); evergreen_startup()
5603 radeon_irq_kms_fini(rdev); evergreen_startup()
5606 evergreen_irq_set(rdev); evergreen_startup()
5608 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; evergreen_startup()
5609 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, evergreen_startup()
5614 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; evergreen_startup()
5615 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET, evergreen_startup()
5620 r = evergreen_cp_load_microcode(rdev); evergreen_startup()
5623 r = evergreen_cp_resume(rdev); evergreen_startup()
5626 r = r600_dma_resume(rdev); evergreen_startup()
5630 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; evergreen_startup()
5632 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, evergreen_startup()
5635 r = uvd_v1_0_init(rdev); evergreen_startup()
5641 r = radeon_ib_pool_init(rdev); evergreen_startup()
5643 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); evergreen_startup()
5647 r = radeon_audio_init(rdev); evergreen_startup()
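
evergreen_startup() above is a long chain of "r = stage(); if (r) return r;" steps (MC programming, microcode, GART, write-back, fence rings, IRQs, CP/DMA/UVD rings, IB pool, audio). A generic, self-contained sketch of that staged bring-up shape, with illustrative stage names only:

#include <stdio.h>

typedef int (*stage_fn)(void);

static int stage_ok(void) { return 0; }         /* stand-in stage */

static int run_startup(const stage_fn *stages, int n)
{
        for (int i = 0; i < n; i++) {
                int r = stages[i]();
                if (r) {
                        /* mirrors the dev_err() + early-return shape above */
                        fprintf(stderr, "stage %d failed (%d)\n", i, r);
                        return r;
                }
        }
        return 0;
}

int main(void)
{
        const stage_fn stages[] = { stage_ok, stage_ok };
        return run_startup(stages, 2);
}
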
5656 int evergreen_resume(struct radeon_device *rdev) evergreen_resume() argument
5663 if (radeon_asic_reset(rdev)) evergreen_resume()
5664 dev_warn(rdev->dev, "GPU reset failed !\n"); evergreen_resume()
5670 atom_asic_init(rdev->mode_info.atom_context); evergreen_resume()
5673 evergreen_init_golden_registers(rdev); evergreen_resume()
5675 if (rdev->pm.pm_method == PM_METHOD_DPM) evergreen_resume()
5676 radeon_pm_resume(rdev); evergreen_resume()
5678 rdev->accel_working = true; evergreen_resume()
5679 r = evergreen_startup(rdev); evergreen_resume()
5682 rdev->accel_working = false; evergreen_resume()
5690 int evergreen_suspend(struct radeon_device *rdev) evergreen_suspend() argument
5692 radeon_pm_suspend(rdev); evergreen_suspend()
5693 radeon_audio_fini(rdev); evergreen_suspend()
5694 uvd_v1_0_fini(rdev); evergreen_suspend()
5695 radeon_uvd_suspend(rdev); evergreen_suspend()
5696 r700_cp_stop(rdev); evergreen_suspend()
5697 r600_dma_stop(rdev); evergreen_suspend()
5698 evergreen_irq_suspend(rdev); evergreen_suspend()
5699 radeon_wb_disable(rdev); evergreen_suspend()
5700 evergreen_pcie_gart_disable(rdev); evergreen_suspend()
5711 int evergreen_init(struct radeon_device *rdev) evergreen_init() argument
5716 if (!radeon_get_bios(rdev)) { evergreen_init()
5717 if (ASIC_IS_AVIVO(rdev)) evergreen_init()
5721 if (!rdev->is_atom_bios) { evergreen_init()
5722 dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n"); evergreen_init()
5725 r = radeon_atombios_init(rdev); evergreen_init()
5731 if (radeon_asic_reset(rdev)) evergreen_init()
5732 dev_warn(rdev->dev, "GPU reset failed !\n"); evergreen_init()
5734 if (!radeon_card_posted(rdev)) { evergreen_init()
5735 if (!rdev->bios) { evergreen_init()
5736 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n"); evergreen_init()
5740 atom_asic_init(rdev->mode_info.atom_context); evergreen_init()
5743 evergreen_init_golden_registers(rdev); evergreen_init()
5745 r600_scratch_init(rdev); evergreen_init()
5747 radeon_surface_init(rdev); evergreen_init()
5749 radeon_get_clock_info(rdev->ddev); evergreen_init()
5751 r = radeon_fence_driver_init(rdev); evergreen_init()
5755 if (rdev->flags & RADEON_IS_AGP) { evergreen_init()
5756 r = radeon_agp_init(rdev); evergreen_init()
5758 radeon_agp_disable(rdev); evergreen_init()
5761 r = evergreen_mc_init(rdev); evergreen_init()
5765 r = radeon_bo_init(rdev); evergreen_init()
5769 if (ASIC_IS_DCE5(rdev)) { evergreen_init()
5770 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) { evergreen_init()
5771 r = ni_init_microcode(rdev); evergreen_init()
5778 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) { evergreen_init()
5779 r = r600_init_microcode(rdev); evergreen_init()
5788 radeon_pm_init(rdev); evergreen_init()
5790 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL; evergreen_init()
5791 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024); evergreen_init()
5793 rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL; evergreen_init()
5794 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024); evergreen_init()
5796 r = radeon_uvd_init(rdev); evergreen_init()
5798 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL; evergreen_init()
5799 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], evergreen_init()
5803 rdev->ih.ring_obj = NULL; evergreen_init()
5804 r600_ih_ring_init(rdev, 64 * 1024); evergreen_init()
5806 r = r600_pcie_gart_init(rdev); evergreen_init()
5810 rdev->accel_working = true; evergreen_init()
5811 r = evergreen_startup(rdev); evergreen_init()
5813 dev_err(rdev->dev, "disabling GPU acceleration\n"); evergreen_init()
5814 r700_cp_fini(rdev); evergreen_init()
5815 r600_dma_fini(rdev); evergreen_init()
5816 r600_irq_fini(rdev); evergreen_init()
5817 if (rdev->flags & RADEON_IS_IGP) evergreen_init()
5818 sumo_rlc_fini(rdev); evergreen_init()
5819 radeon_wb_fini(rdev); evergreen_init()
5820 radeon_ib_pool_fini(rdev); evergreen_init()
5821 radeon_irq_kms_fini(rdev); evergreen_init()
5822 evergreen_pcie_gart_fini(rdev); evergreen_init()
5823 rdev->accel_working = false; evergreen_init()
5830 if (ASIC_IS_DCE5(rdev)) { evergreen_init()
5831 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) { evergreen_init()
5840 void evergreen_fini(struct radeon_device *rdev) evergreen_fini() argument
5842 radeon_pm_fini(rdev); evergreen_fini()
5843 radeon_audio_fini(rdev); evergreen_fini()
5844 r700_cp_fini(rdev); evergreen_fini()
5845 r600_dma_fini(rdev); evergreen_fini()
5846 r600_irq_fini(rdev); evergreen_fini()
5847 if (rdev->flags & RADEON_IS_IGP) evergreen_fini()
5848 sumo_rlc_fini(rdev); evergreen_fini()
5849 radeon_wb_fini(rdev); evergreen_fini()
5850 radeon_ib_pool_fini(rdev); evergreen_fini()
5851 radeon_irq_kms_fini(rdev); evergreen_fini()
5852 uvd_v1_0_fini(rdev); evergreen_fini()
5853 radeon_uvd_fini(rdev); evergreen_fini()
5854 evergreen_pcie_gart_fini(rdev); evergreen_fini()
5855 r600_vram_scratch_fini(rdev); evergreen_fini()
5856 radeon_gem_fini(rdev); evergreen_fini()
5857 radeon_fence_driver_fini(rdev); evergreen_fini()
5858 radeon_agp_fini(rdev); evergreen_fini()
5859 radeon_bo_fini(rdev); evergreen_fini()
5860 radeon_atombios_fini(rdev); evergreen_fini()
5861 kfree(rdev->bios); evergreen_fini()
5862 rdev->bios = NULL; evergreen_fini()
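
evergreen_fini() above unwinds in roughly the reverse order evergreen_init() and evergreen_startup() built things up, ending with kfree(rdev->bios) and NULLing the pointer. A toy version of that LIFO teardown discipline, with invented resource names:

#include <stdlib.h>

struct dev_state {
        void *ring_buf, *gart_table, *bios_copy;   /* illustrative only */
};

static void dev_fini(struct dev_state *s)
{
        /* free in reverse order of acquisition; NULL each pointer so a
         * second fini call is harmless, as with rdev->bios above */
        free(s->ring_buf);   s->ring_buf = NULL;
        free(s->gart_table); s->gart_table = NULL;
        free(s->bios_copy);  s->bios_copy = NULL;
}
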
5865 void evergreen_pcie_gen2_enable(struct radeon_device *rdev) evergreen_pcie_gen2_enable() argument
5872 if (rdev->flags & RADEON_IS_IGP) evergreen_pcie_gen2_enable()
5875 if (!(rdev->flags & RADEON_IS_PCIE)) evergreen_pcie_gen2_enable()
5879 if (ASIC_IS_X2(rdev)) evergreen_pcie_gen2_enable()
5882 if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) && evergreen_pcie_gen2_enable()
5883 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT)) evergreen_pcie_gen2_enable()
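
evergreen_pcie_gen2_enable() opens with a run of early returns (source lines 5872-5883): IGP parts, non-PCIe boards, X2 boards, and slots that cannot reach 5.0 or 8.0 GT/s are all skipped before any registers are touched. A standalone predicate capturing those gates; the speed enum is a stand-in for the kernel's pci_bus_speed values:

#include <stdbool.h>

enum bus_speed { SPEED_2_5GT, SPEED_5_0GT, SPEED_8_0GT };

static bool pcie_gen2_possible(bool is_igp, bool is_pcie, bool is_x2,
                               enum bus_speed max_bus_speed)
{
        if (is_igp)                     /* integrated parts: nothing to do */
                return false;
        if (!is_pcie || is_x2)          /* non-PCIe boards, dual-GPU boards */
                return false;
        return max_bus_speed == SPEED_5_0GT ||
               max_bus_speed == SPEED_8_0GT;
}
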
5928 void evergreen_program_aspm(struct radeon_device *rdev) evergreen_program_aspm() argument
5943 if (!(rdev->flags & RADEON_IS_PCIE)) evergreen_program_aspm()
5946 switch (rdev->family) { evergreen_program_aspm()
5963 if (rdev->flags & RADEON_IS_IGP) evergreen_program_aspm()
5985 if (rdev->family >= CHIP_BARTS) evergreen_program_aspm()
5992 if (rdev->family >= CHIP_BARTS) evergreen_program_aspm()
6022 if (rdev->family >= CHIP_BARTS) { evergreen_program_aspm()
6054 if (rdev->family >= CHIP_BARTS) { evergreen_program_aspm()
6071 if (rdev->family < CHIP_BARTS) evergreen_program_aspm()

kv_dpm.c
36 static int kv_enable_nb_dpm(struct radeon_device *rdev,
38 static void kv_init_graphics_levels(struct radeon_device *rdev);
39 static int kv_calculate_ds_divider(struct radeon_device *rdev);
40 static int kv_calculate_nbps_level_settings(struct radeon_device *rdev);
41 static int kv_calculate_dpm_settings(struct radeon_device *rdev);
42 static void kv_enable_new_levels(struct radeon_device *rdev);
43 static void kv_program_nbps_index_settings(struct radeon_device *rdev,
45 static int kv_set_enabled_level(struct radeon_device *rdev, u32 level);
46 static int kv_set_enabled_levels(struct radeon_device *rdev);
47 static int kv_force_dpm_highest(struct radeon_device *rdev);
48 static int kv_force_dpm_lowest(struct radeon_device *rdev);
49 static void kv_apply_state_adjust_rules(struct radeon_device *rdev,
52 static int kv_set_thermal_temperature_range(struct radeon_device *rdev,
54 static int kv_init_fps_limits(struct radeon_device *rdev);
56 void kv_dpm_powergate_uvd(struct radeon_device *rdev, bool gate);
57 static void kv_dpm_powergate_vce(struct radeon_device *rdev, bool gate);
58 static void kv_dpm_powergate_samu(struct radeon_device *rdev, bool gate);
59 static void kv_dpm_powergate_acp(struct radeon_device *rdev, bool gate);
61 extern void cik_enter_rlc_safe_mode(struct radeon_device *rdev);
62 extern void cik_exit_rlc_safe_mode(struct radeon_device *rdev);
63 extern void cik_update_cg(struct radeon_device *rdev,
249 static struct kv_power_info *kv_get_pi(struct radeon_device *rdev) kv_get_pi() argument
251 struct kv_power_info *pi = rdev->pm.dpm.priv; kv_get_pi()
257 static void kv_program_local_cac_table(struct radeon_device *rdev,
282 static int kv_program_pt_config_registers(struct radeon_device *rdev, kv_program_pt_config_registers() argument
331 static void kv_do_enable_didt(struct radeon_device *rdev, bool enable) kv_do_enable_didt() argument
333 struct kv_power_info *pi = kv_get_pi(rdev); kv_do_enable_didt()
373 static int kv_enable_didt(struct radeon_device *rdev, bool enable) kv_enable_didt() argument
375 struct kv_power_info *pi = kv_get_pi(rdev); kv_enable_didt()
382 cik_enter_rlc_safe_mode(rdev); kv_enable_didt()
385 ret = kv_program_pt_config_registers(rdev, didt_config_kv); kv_enable_didt()
387 cik_exit_rlc_safe_mode(rdev); kv_enable_didt()
392 kv_do_enable_didt(rdev, enable); kv_enable_didt()
394 cik_exit_rlc_safe_mode(rdev); kv_enable_didt()
401 static void kv_initialize_hardware_cac_manager(struct radeon_device *rdev)
403 struct kv_power_info *pi = kv_get_pi(rdev);
408 kv_program_local_cac_table(rdev, sx_local_cac_cfg_kv, sx0_cac_config_reg);
412 kv_program_local_cac_table(rdev, mc0_local_cac_cfg_kv, mc0_cac_config_reg);
416 kv_program_local_cac_table(rdev, mc1_local_cac_cfg_kv, mc1_cac_config_reg);
420 kv_program_local_cac_table(rdev, mc2_local_cac_cfg_kv, mc2_cac_config_reg);
424 kv_program_local_cac_table(rdev, mc3_local_cac_cfg_kv, mc3_cac_config_reg);
428 kv_program_local_cac_table(rdev, cpl_local_cac_cfg_kv, cpl_cac_config_reg);
433 static int kv_enable_smc_cac(struct radeon_device *rdev, bool enable) kv_enable_smc_cac() argument
435 struct kv_power_info *pi = kv_get_pi(rdev); kv_enable_smc_cac()
440 ret = kv_notify_message_to_smu(rdev, PPSMC_MSG_EnableCac); kv_enable_smc_cac()
446 kv_notify_message_to_smu(rdev, PPSMC_MSG_DisableCac); kv_enable_smc_cac()
454 static int kv_process_firmware_header(struct radeon_device *rdev) kv_process_firmware_header() argument
456 struct kv_power_info *pi = kv_get_pi(rdev); kv_process_firmware_header()
460 ret = kv_read_smc_sram_dword(rdev, SMU7_FIRMWARE_HEADER_LOCATION + kv_process_firmware_header()
467 ret = kv_read_smc_sram_dword(rdev, SMU7_FIRMWARE_HEADER_LOCATION + kv_process_firmware_header()
477 static int kv_enable_dpm_voltage_scaling(struct radeon_device *rdev) kv_enable_dpm_voltage_scaling() argument
479 struct kv_power_info *pi = kv_get_pi(rdev); kv_enable_dpm_voltage_scaling()
484 ret = kv_copy_bytes_to_smc(rdev, kv_enable_dpm_voltage_scaling()
493 static int kv_set_dpm_interval(struct radeon_device *rdev) kv_set_dpm_interval() argument
495 struct kv_power_info *pi = kv_get_pi(rdev); kv_set_dpm_interval()
500 ret = kv_copy_bytes_to_smc(rdev, kv_set_dpm_interval()
509 static int kv_set_dpm_boot_state(struct radeon_device *rdev) kv_set_dpm_boot_state() argument
511 struct kv_power_info *pi = kv_get_pi(rdev); kv_set_dpm_boot_state()
514 ret = kv_copy_bytes_to_smc(rdev, kv_set_dpm_boot_state()
523 static void kv_program_vc(struct radeon_device *rdev) kv_program_vc() argument
528 static void kv_clear_vc(struct radeon_device *rdev) kv_clear_vc() argument
533 static int kv_set_divider_value(struct radeon_device *rdev, kv_set_divider_value() argument
536 struct kv_power_info *pi = kv_get_pi(rdev); kv_set_divider_value()
540 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, kv_set_divider_value()
551 static u32 kv_convert_vid2_to_vid7(struct radeon_device *rdev, kv_convert_vid2_to_vid7() argument
556 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk; kv_convert_vid2_to_vid7()
573 static u32 kv_convert_vid7_to_vid2(struct radeon_device *rdev, kv_convert_vid7_to_vid2() argument
578 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk; kv_convert_vid7_to_vid2()
597 static u16 kv_convert_8bit_index_to_voltage(struct radeon_device *rdev, kv_convert_8bit_index_to_voltage() argument
603 static u16 kv_convert_2bit_index_to_voltage(struct radeon_device *rdev, kv_convert_2bit_index_to_voltage() argument
606 struct kv_power_info *pi = kv_get_pi(rdev); kv_convert_2bit_index_to_voltage()
607 u32 vid_8bit = kv_convert_vid2_to_vid7(rdev, kv_convert_2bit_index_to_voltage()
611 return kv_convert_8bit_index_to_voltage(rdev, (u16)vid_8bit); kv_convert_2bit_index_to_voltage()
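
kv_convert_2bit_index_to_voltage() above chains two lookups: the 2-bit VID is widened to a 7-bit VID via kv_convert_vid2_to_vid7() and the SCLK/VDDC dependency table, then mapped to millivolts. The sketch below assumes a small table for the first step and a linear decode for the second; the actual table contents and coefficients live in the full kv_dpm.c, not in these fragments:

#include <stdint.h>

/* illustrative 2-bit -> 7-bit VID table (real values come from the
 * vddc_dependency_on_sclk table parsed out of the VBIOS) */
static const uint8_t vid2_to_vid7[4] = { 0x40, 0x30, 0x20, 0x10 };

static uint16_t vid7_to_mv(uint8_t vid7)
{
        /* assumed linear decode with placeholder coefficients */
        return 6200 - (uint16_t)vid7 * 25;
}

static uint16_t convert_2bit_index_to_voltage(uint8_t vid_2bit)
{
        return vid7_to_mv(vid2_to_vid7[vid_2bit & 3]);
}
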
615 static int kv_set_vid(struct radeon_device *rdev, u32 index, u32 vid) kv_set_vid() argument
617 struct kv_power_info *pi = kv_get_pi(rdev); kv_set_vid()
621 cpu_to_be32(kv_convert_2bit_index_to_voltage(rdev, vid)); kv_set_vid()
626 static int kv_set_at(struct radeon_device *rdev, u32 index, u32 at) kv_set_at() argument
628 struct kv_power_info *pi = kv_get_pi(rdev); kv_set_at()
635 static void kv_dpm_power_level_enable(struct radeon_device *rdev, kv_dpm_power_level_enable() argument
638 struct kv_power_info *pi = kv_get_pi(rdev); kv_dpm_power_level_enable()
643 static void kv_start_dpm(struct radeon_device *rdev) kv_start_dpm() argument
650 kv_smc_dpm_enable(rdev, true); kv_start_dpm()
653 static void kv_stop_dpm(struct radeon_device *rdev) kv_stop_dpm() argument
655 kv_smc_dpm_enable(rdev, false); kv_stop_dpm()
658 static void kv_start_am(struct radeon_device *rdev) kv_start_am() argument
668 static void kv_reset_am(struct radeon_device *rdev) kv_reset_am() argument
677 static int kv_freeze_sclk_dpm(struct radeon_device *rdev, bool freeze) kv_freeze_sclk_dpm() argument
679 return kv_notify_message_to_smu(rdev, freeze ? kv_freeze_sclk_dpm()
683 static int kv_force_lowest_valid(struct radeon_device *rdev) kv_force_lowest_valid() argument
685 return kv_force_dpm_lowest(rdev); kv_force_lowest_valid()
688 static int kv_unforce_levels(struct radeon_device *rdev) kv_unforce_levels() argument
690 if (rdev->family == CHIP_KABINI || rdev->family == CHIP_MULLINS) kv_unforce_levels()
691 return kv_notify_message_to_smu(rdev, PPSMC_MSG_NoForcedLevel); kv_unforce_levels()
693 return kv_set_enabled_levels(rdev); kv_unforce_levels()
696 static int kv_update_sclk_t(struct radeon_device *rdev) kv_update_sclk_t() argument
698 struct kv_power_info *pi = kv_get_pi(rdev); kv_update_sclk_t()
705 ret = kv_copy_bytes_to_smc(rdev, kv_update_sclk_t()
714 static int kv_program_bootup_state(struct radeon_device *rdev) kv_program_bootup_state() argument
716 struct kv_power_info *pi = kv_get_pi(rdev); kv_program_bootup_state()
719 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk; kv_program_bootup_state()
728 kv_dpm_power_level_enable(rdev, i, true); kv_program_bootup_state()
742 kv_dpm_power_level_enable(rdev, i, true); kv_program_bootup_state()
747 static int kv_enable_auto_thermal_throttling(struct radeon_device *rdev) kv_enable_auto_thermal_throttling() argument
749 struct kv_power_info *pi = kv_get_pi(rdev); kv_enable_auto_thermal_throttling()
754 ret = kv_copy_bytes_to_smc(rdev, kv_enable_auto_thermal_throttling()
763 static int kv_upload_dpm_settings(struct radeon_device *rdev) kv_upload_dpm_settings() argument
765 struct kv_power_info *pi = kv_get_pi(rdev); kv_upload_dpm_settings()
768 ret = kv_copy_bytes_to_smc(rdev, kv_upload_dpm_settings()
778 ret = kv_copy_bytes_to_smc(rdev, kv_upload_dpm_settings()
792 static u32 kv_get_clk_bypass(struct radeon_device *rdev, u32 clk) kv_get_clk_bypass() argument
794 struct kv_power_info *pi = kv_get_pi(rdev); kv_get_clk_bypass()
817 static int kv_populate_uvd_table(struct radeon_device *rdev) kv_populate_uvd_table() argument
819 struct kv_power_info *pi = kv_get_pi(rdev); kv_populate_uvd_table()
821 &rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table; kv_populate_uvd_table()
840 (u8)kv_get_clk_bypass(rdev, table->entries[i].vclk); kv_populate_uvd_table()
842 (u8)kv_get_clk_bypass(rdev, table->entries[i].dclk); kv_populate_uvd_table()
844 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, kv_populate_uvd_table()
850 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, kv_populate_uvd_table()
859 ret = kv_copy_bytes_to_smc(rdev, kv_populate_uvd_table()
869 ret = kv_copy_bytes_to_smc(rdev, kv_populate_uvd_table()
877 ret = kv_copy_bytes_to_smc(rdev, kv_populate_uvd_table()
888 static int kv_populate_vce_table(struct radeon_device *rdev) kv_populate_vce_table() argument
890 struct kv_power_info *pi = kv_get_pi(rdev); kv_populate_vce_table()
894 &rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table; kv_populate_vce_table()
910 (u8)kv_get_clk_bypass(rdev, table->entries[i].evclk); kv_populate_vce_table()
912 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, kv_populate_vce_table()
921 ret = kv_copy_bytes_to_smc(rdev, kv_populate_vce_table()
932 ret = kv_copy_bytes_to_smc(rdev, kv_populate_vce_table()
941 ret = kv_copy_bytes_to_smc(rdev, kv_populate_vce_table()
951 static int kv_populate_samu_table(struct radeon_device *rdev) kv_populate_samu_table() argument
953 struct kv_power_info *pi = kv_get_pi(rdev); kv_populate_samu_table()
955 &rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table; kv_populate_samu_table()
973 (u8)kv_get_clk_bypass(rdev, table->entries[i].clk); kv_populate_samu_table()
975 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, kv_populate_samu_table()
984 ret = kv_copy_bytes_to_smc(rdev, kv_populate_samu_table()
995 ret = kv_copy_bytes_to_smc(rdev, kv_populate_samu_table()
1004 ret = kv_copy_bytes_to_smc(rdev, kv_populate_samu_table()
1017 static int kv_populate_acp_table(struct radeon_device *rdev) kv_populate_acp_table() argument
1019 struct kv_power_info *pi = kv_get_pi(rdev); kv_populate_acp_table()
1021 &rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table; kv_populate_acp_table()
1034 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, kv_populate_acp_table()
1043 ret = kv_copy_bytes_to_smc(rdev, kv_populate_acp_table()
1054 ret = kv_copy_bytes_to_smc(rdev, kv_populate_acp_table()
1063 ret = kv_copy_bytes_to_smc(rdev, kv_populate_acp_table()
1075 static void kv_calculate_dfs_bypass_settings(struct radeon_device *rdev) kv_calculate_dfs_bypass_settings() argument
1077 struct kv_power_info *pi = kv_get_pi(rdev); kv_calculate_dfs_bypass_settings()
1080 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk; kv_calculate_dfs_bypass_settings()
1125 static int kv_enable_ulv(struct radeon_device *rdev, bool enable) kv_enable_ulv() argument
1127 return kv_notify_message_to_smu(rdev, enable ? kv_enable_ulv()
1131 static void kv_reset_acp_boot_level(struct radeon_device *rdev) kv_reset_acp_boot_level() argument
1133 struct kv_power_info *pi = kv_get_pi(rdev); kv_reset_acp_boot_level()
1138 static void kv_update_current_ps(struct radeon_device *rdev, kv_update_current_ps() argument
1142 struct kv_power_info *pi = kv_get_pi(rdev); kv_update_current_ps()
1149 static void kv_update_requested_ps(struct radeon_device *rdev, kv_update_requested_ps() argument
1153 struct kv_power_info *pi = kv_get_pi(rdev); kv_update_requested_ps()
1160 void kv_dpm_enable_bapm(struct radeon_device *rdev, bool enable) kv_dpm_enable_bapm() argument
1162 struct kv_power_info *pi = kv_get_pi(rdev); kv_dpm_enable_bapm()
1166 ret = kv_smc_bapm_enable(rdev, enable); kv_dpm_enable_bapm()
1172 static void kv_enable_thermal_int(struct radeon_device *rdev, bool enable) kv_enable_thermal_int() argument
1185 int kv_dpm_enable(struct radeon_device *rdev) kv_dpm_enable() argument
1187 struct kv_power_info *pi = kv_get_pi(rdev); kv_dpm_enable()
1190 ret = kv_process_firmware_header(rdev); kv_dpm_enable()
1195 kv_init_fps_limits(rdev); kv_dpm_enable()
1196 kv_init_graphics_levels(rdev); kv_dpm_enable()
1197 ret = kv_program_bootup_state(rdev); kv_dpm_enable()
1202 kv_calculate_dfs_bypass_settings(rdev); kv_dpm_enable()
1203 ret = kv_upload_dpm_settings(rdev); kv_dpm_enable()
1208 ret = kv_populate_uvd_table(rdev); kv_dpm_enable()
1213 ret = kv_populate_vce_table(rdev); kv_dpm_enable()
1218 ret = kv_populate_samu_table(rdev); kv_dpm_enable()
1223 ret = kv_populate_acp_table(rdev); kv_dpm_enable()
1228 kv_program_vc(rdev); kv_dpm_enable()
1230 kv_initialize_hardware_cac_manager(rdev); kv_dpm_enable()
1232 kv_start_am(rdev); kv_dpm_enable()
1234 ret = kv_enable_auto_thermal_throttling(rdev); kv_dpm_enable()
1240 ret = kv_enable_dpm_voltage_scaling(rdev); kv_dpm_enable()
1245 ret = kv_set_dpm_interval(rdev); kv_dpm_enable()
1250 ret = kv_set_dpm_boot_state(rdev); kv_dpm_enable()
1255 ret = kv_enable_ulv(rdev, true); kv_dpm_enable()
1260 kv_start_dpm(rdev); kv_dpm_enable()
1261 ret = kv_enable_didt(rdev, true); kv_dpm_enable()
1266 ret = kv_enable_smc_cac(rdev, true); kv_dpm_enable()
1272 kv_reset_acp_boot_level(rdev); kv_dpm_enable()
1274 ret = kv_smc_bapm_enable(rdev, false); kv_dpm_enable()
1280 kv_update_current_ps(rdev, rdev->pm.dpm.boot_ps); kv_dpm_enable()
1285 int kv_dpm_late_enable(struct radeon_device *rdev) kv_dpm_late_enable() argument
1289 if (rdev->irq.installed && kv_dpm_late_enable()
1290 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) { kv_dpm_late_enable()
1291 ret = kv_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX); kv_dpm_late_enable()
1296 kv_enable_thermal_int(rdev, true); kv_dpm_late_enable()
1300 kv_dpm_powergate_acp(rdev, true); kv_dpm_late_enable()
1301 kv_dpm_powergate_samu(rdev, true); kv_dpm_late_enable()
1302 kv_dpm_powergate_vce(rdev, true); kv_dpm_late_enable()
1303 kv_dpm_powergate_uvd(rdev, true); kv_dpm_late_enable()
1308 void kv_dpm_disable(struct radeon_device *rdev) kv_dpm_disable() argument
1310 kv_smc_bapm_enable(rdev, false); kv_dpm_disable()
1312 if (rdev->family == CHIP_MULLINS) kv_dpm_disable()
1313 kv_enable_nb_dpm(rdev, false); kv_dpm_disable()
1316 kv_dpm_powergate_acp(rdev, false); kv_dpm_disable()
1317 kv_dpm_powergate_samu(rdev, false); kv_dpm_disable()
1318 kv_dpm_powergate_vce(rdev, false); kv_dpm_disable()
1319 kv_dpm_powergate_uvd(rdev, false); kv_dpm_disable()
1321 kv_enable_smc_cac(rdev, false); kv_dpm_disable()
1322 kv_enable_didt(rdev, false); kv_dpm_disable()
1323 kv_clear_vc(rdev); kv_dpm_disable()
1324 kv_stop_dpm(rdev); kv_dpm_disable()
1325 kv_enable_ulv(rdev, false); kv_dpm_disable()
1326 kv_reset_am(rdev); kv_dpm_disable()
1327 kv_enable_thermal_int(rdev, false); kv_dpm_disable()
1329 kv_update_current_ps(rdev, rdev->pm.dpm.boot_ps); kv_dpm_disable()
1333 static int kv_write_smc_soft_register(struct radeon_device *rdev,
1336 struct kv_power_info *pi = kv_get_pi(rdev);
1338 return kv_copy_bytes_to_smc(rdev, pi->soft_regs_start + reg_offset,
1342 static int kv_read_smc_soft_register(struct radeon_device *rdev,
1345 struct kv_power_info *pi = kv_get_pi(rdev);
1347 return kv_read_smc_sram_dword(rdev, pi->soft_regs_start + reg_offset,
1352 static void kv_init_sclk_t(struct radeon_device *rdev) kv_init_sclk_t() argument
1354 struct kv_power_info *pi = kv_get_pi(rdev); kv_init_sclk_t()
1359 static int kv_init_fps_limits(struct radeon_device *rdev) kv_init_fps_limits() argument
1361 struct kv_power_info *pi = kv_get_pi(rdev); kv_init_fps_limits()
1369 ret = kv_copy_bytes_to_smc(rdev, kv_init_fps_limits()
1378 ret = kv_copy_bytes_to_smc(rdev, kv_init_fps_limits()
1388 static void kv_init_powergate_state(struct radeon_device *rdev) kv_init_powergate_state() argument
1390 struct kv_power_info *pi = kv_get_pi(rdev); kv_init_powergate_state()
1399 static int kv_enable_uvd_dpm(struct radeon_device *rdev, bool enable) kv_enable_uvd_dpm() argument
1401 return kv_notify_message_to_smu(rdev, enable ? kv_enable_uvd_dpm()
1405 static int kv_enable_vce_dpm(struct radeon_device *rdev, bool enable) kv_enable_vce_dpm() argument
1407 return kv_notify_message_to_smu(rdev, enable ? kv_enable_vce_dpm()
1411 static int kv_enable_samu_dpm(struct radeon_device *rdev, bool enable) kv_enable_samu_dpm() argument
1413 return kv_notify_message_to_smu(rdev, enable ? kv_enable_samu_dpm()
1417 static int kv_enable_acp_dpm(struct radeon_device *rdev, bool enable) kv_enable_acp_dpm() argument
1419 return kv_notify_message_to_smu(rdev, enable ? kv_enable_acp_dpm()
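
The four kv_enable_*_dpm() helpers above are identical one-liners: pick one of two SMU message IDs based on the enable flag and hand it to kv_notify_message_to_smu(). A generic sketch of that toggle shape, with invented message IDs and a stubbed mailbox:

#include <stdbool.h>

enum ppsmc_msg {                        /* stand-ins for PPSMC_MSG_* IDs */
        MSG_FEATURE_ENABLE  = 0x100,
        MSG_FEATURE_DISABLE = 0x101,
};

static int notify_smu(enum ppsmc_msg msg)
{
        (void)msg;      /* the real driver writes an SMU mailbox register */
        return 0;
}

static int enable_feature_dpm(bool enable)
{
        return notify_smu(enable ? MSG_FEATURE_ENABLE : MSG_FEATURE_DISABLE);
}
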
1423 static int kv_update_uvd_dpm(struct radeon_device *rdev, bool gate) kv_update_uvd_dpm() argument
1425 struct kv_power_info *pi = kv_get_pi(rdev); kv_update_uvd_dpm()
1427 &rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table; kv_update_uvd_dpm()
1443 ret = kv_copy_bytes_to_smc(rdev, kv_update_uvd_dpm()
1451 kv_send_msg_to_smc_with_parameter(rdev, kv_update_uvd_dpm()
1456 return kv_enable_uvd_dpm(rdev, !gate); kv_update_uvd_dpm()
1459 static u8 kv_get_vce_boot_level(struct radeon_device *rdev, u32 evclk) kv_get_vce_boot_level() argument
1463 &rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table; kv_get_vce_boot_level()
1473 static int kv_update_vce_dpm(struct radeon_device *rdev, kv_update_vce_dpm() argument
1477 struct kv_power_info *pi = kv_get_pi(rdev); kv_update_vce_dpm()
1479 &rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table; kv_update_vce_dpm()
1483 kv_dpm_powergate_vce(rdev, false); kv_update_vce_dpm()
1485 cik_update_cg(rdev, RADEON_CG_BLOCK_VCE, false); kv_update_vce_dpm()
1489 pi->vce_boot_level = kv_get_vce_boot_level(rdev, radeon_new_state->evclk); kv_update_vce_dpm()
1491 ret = kv_copy_bytes_to_smc(rdev, kv_update_vce_dpm()
1501 kv_send_msg_to_smc_with_parameter(rdev, kv_update_vce_dpm()
1505 kv_enable_vce_dpm(rdev, true); kv_update_vce_dpm()
1507 kv_enable_vce_dpm(rdev, false); kv_update_vce_dpm()
1509 cik_update_cg(rdev, RADEON_CG_BLOCK_VCE, true); kv_update_vce_dpm()
1510 kv_dpm_powergate_vce(rdev, true); kv_update_vce_dpm()
1516 static int kv_update_samu_dpm(struct radeon_device *rdev, bool gate) kv_update_samu_dpm() argument
1518 struct kv_power_info *pi = kv_get_pi(rdev); kv_update_samu_dpm()
1520 &rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table; kv_update_samu_dpm()
1529 ret = kv_copy_bytes_to_smc(rdev, kv_update_samu_dpm()
1539 kv_send_msg_to_smc_with_parameter(rdev, kv_update_samu_dpm()
1544 return kv_enable_samu_dpm(rdev, !gate); kv_update_samu_dpm()
1547 static u8 kv_get_acp_boot_level(struct radeon_device *rdev) kv_get_acp_boot_level() argument
1551 &rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table; kv_get_acp_boot_level()
1564 static void kv_update_acp_boot_level(struct radeon_device *rdev) kv_update_acp_boot_level() argument
1566 struct kv_power_info *pi = kv_get_pi(rdev); kv_update_acp_boot_level()
1570 acp_boot_level = kv_get_acp_boot_level(rdev); kv_update_acp_boot_level()
1573 kv_send_msg_to_smc_with_parameter(rdev, kv_update_acp_boot_level()
1580 static int kv_update_acp_dpm(struct radeon_device *rdev, bool gate) kv_update_acp_dpm() argument
1582 struct kv_power_info *pi = kv_get_pi(rdev); kv_update_acp_dpm()
1584 &rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table; kv_update_acp_dpm()
1591 pi->acp_boot_level = kv_get_acp_boot_level(rdev); kv_update_acp_dpm()
1593 ret = kv_copy_bytes_to_smc(rdev, kv_update_acp_dpm()
1603 kv_send_msg_to_smc_with_parameter(rdev, kv_update_acp_dpm()
1608 return kv_enable_acp_dpm(rdev, !gate); kv_update_acp_dpm()
1611 void kv_dpm_powergate_uvd(struct radeon_device *rdev, bool gate) kv_dpm_powergate_uvd() argument
1613 struct kv_power_info *pi = kv_get_pi(rdev); kv_dpm_powergate_uvd()
1622 uvd_v1_0_stop(rdev); kv_dpm_powergate_uvd()
1623 cik_update_cg(rdev, RADEON_CG_BLOCK_UVD, false); kv_dpm_powergate_uvd()
1625 kv_update_uvd_dpm(rdev, gate); kv_dpm_powergate_uvd()
1627 kv_notify_message_to_smu(rdev, PPSMC_MSG_UVDPowerOFF); kv_dpm_powergate_uvd()
1630 kv_notify_message_to_smu(rdev, PPSMC_MSG_UVDPowerON); kv_dpm_powergate_uvd()
1631 uvd_v4_2_resume(rdev); kv_dpm_powergate_uvd()
1632 uvd_v1_0_start(rdev); kv_dpm_powergate_uvd()
1633 cik_update_cg(rdev, RADEON_CG_BLOCK_UVD, true); kv_dpm_powergate_uvd()
1635 kv_update_uvd_dpm(rdev, gate); kv_dpm_powergate_uvd()
1639 static void kv_dpm_powergate_vce(struct radeon_device *rdev, bool gate) kv_dpm_powergate_vce() argument
1641 struct kv_power_info *pi = kv_get_pi(rdev); kv_dpm_powergate_vce()
1651 kv_notify_message_to_smu(rdev, PPSMC_MSG_VCEPowerOFF); kv_dpm_powergate_vce()
1655 kv_notify_message_to_smu(rdev, PPSMC_MSG_VCEPowerON); kv_dpm_powergate_vce()
1656 vce_v2_0_resume(rdev); kv_dpm_powergate_vce()
1657 vce_v1_0_start(rdev); kv_dpm_powergate_vce()
1662 static void kv_dpm_powergate_samu(struct radeon_device *rdev, bool gate) kv_dpm_powergate_samu() argument
1664 struct kv_power_info *pi = kv_get_pi(rdev); kv_dpm_powergate_samu()
1672 kv_update_samu_dpm(rdev, true); kv_dpm_powergate_samu()
1674 kv_notify_message_to_smu(rdev, PPSMC_MSG_SAMPowerOFF); kv_dpm_powergate_samu()
1677 kv_notify_message_to_smu(rdev, PPSMC_MSG_SAMPowerON); kv_dpm_powergate_samu()
1678 kv_update_samu_dpm(rdev, false); kv_dpm_powergate_samu()
1682 static void kv_dpm_powergate_acp(struct radeon_device *rdev, bool gate) kv_dpm_powergate_acp() argument
1684 struct kv_power_info *pi = kv_get_pi(rdev); kv_dpm_powergate_acp()
1689 if (rdev->family == CHIP_KABINI || rdev->family == CHIP_MULLINS) kv_dpm_powergate_acp()
1695 kv_update_acp_dpm(rdev, true); kv_dpm_powergate_acp()
1697 kv_notify_message_to_smu(rdev, PPSMC_MSG_ACPPowerOFF); kv_dpm_powergate_acp()
1700 kv_notify_message_to_smu(rdev, PPSMC_MSG_ACPPowerON); kv_dpm_powergate_acp()
1701 kv_update_acp_dpm(rdev, false); kv_dpm_powergate_acp()
1705 static void kv_set_valid_clock_range(struct radeon_device *rdev, kv_set_valid_clock_range() argument
1709 struct kv_power_info *pi = kv_get_pi(rdev); kv_set_valid_clock_range()
1712 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk; kv_set_valid_clock_range()
1767 static int kv_update_dfs_bypass_settings(struct radeon_device *rdev, kv_update_dfs_bypass_settings() argument
1771 struct kv_power_info *pi = kv_get_pi(rdev); kv_update_dfs_bypass_settings()
1778 ret = kv_copy_bytes_to_smc(rdev, kv_update_dfs_bypass_settings()
1790 static int kv_enable_nb_dpm(struct radeon_device *rdev, kv_enable_nb_dpm() argument
1793 struct kv_power_info *pi = kv_get_pi(rdev); kv_enable_nb_dpm()
1798 ret = kv_notify_message_to_smu(rdev, PPSMC_MSG_NBDPM_Enable); kv_enable_nb_dpm()
1804 ret = kv_notify_message_to_smu(rdev, PPSMC_MSG_NBDPM_Disable); kv_enable_nb_dpm()
1813 int kv_dpm_force_performance_level(struct radeon_device *rdev, kv_dpm_force_performance_level() argument
1819 ret = kv_force_dpm_highest(rdev); kv_dpm_force_performance_level()
1823 ret = kv_force_dpm_lowest(rdev); kv_dpm_force_performance_level()
1827 ret = kv_unforce_levels(rdev); kv_dpm_force_performance_level()
1832 rdev->pm.dpm.forced_level = level; kv_dpm_force_performance_level()
1837 int kv_dpm_pre_set_power_state(struct radeon_device *rdev) kv_dpm_pre_set_power_state() argument
1839 struct kv_power_info *pi = kv_get_pi(rdev); kv_dpm_pre_set_power_state()
1840 struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps; kv_dpm_pre_set_power_state()
1843 kv_update_requested_ps(rdev, new_ps); kv_dpm_pre_set_power_state()
1845 kv_apply_state_adjust_rules(rdev, kv_dpm_pre_set_power_state()
1852 int kv_dpm_set_power_state(struct radeon_device *rdev) kv_dpm_set_power_state() argument
1854 struct kv_power_info *pi = kv_get_pi(rdev); kv_dpm_set_power_state()
1860 ret = kv_smc_bapm_enable(rdev, rdev->pm.dpm.ac_power); kv_dpm_set_power_state()
1867 if (rdev->family == CHIP_KABINI || rdev->family == CHIP_MULLINS) { kv_dpm_set_power_state()
1869 kv_set_valid_clock_range(rdev, new_ps); kv_dpm_set_power_state()
1870 kv_update_dfs_bypass_settings(rdev, new_ps); kv_dpm_set_power_state()
1871 ret = kv_calculate_ds_divider(rdev); kv_dpm_set_power_state()
1876 kv_calculate_nbps_level_settings(rdev); kv_dpm_set_power_state()
1877 kv_calculate_dpm_settings(rdev); kv_dpm_set_power_state()
1878 kv_force_lowest_valid(rdev); kv_dpm_set_power_state()
1879 kv_enable_new_levels(rdev); kv_dpm_set_power_state()
1880 kv_upload_dpm_settings(rdev); kv_dpm_set_power_state()
1881 kv_program_nbps_index_settings(rdev, new_ps); kv_dpm_set_power_state()
1882 kv_unforce_levels(rdev); kv_dpm_set_power_state()
1883 kv_set_enabled_levels(rdev); kv_dpm_set_power_state()
1884 kv_force_lowest_valid(rdev); kv_dpm_set_power_state()
1885 kv_unforce_levels(rdev); kv_dpm_set_power_state()
1887 ret = kv_update_vce_dpm(rdev, new_ps, old_ps); kv_dpm_set_power_state()
1892 kv_update_sclk_t(rdev); kv_dpm_set_power_state()
1893 if (rdev->family == CHIP_MULLINS) kv_dpm_set_power_state()
1894 kv_enable_nb_dpm(rdev, true); kv_dpm_set_power_state()
1898 kv_set_valid_clock_range(rdev, new_ps); kv_dpm_set_power_state()
1899 kv_update_dfs_bypass_settings(rdev, new_ps); kv_dpm_set_power_state()
1900 ret = kv_calculate_ds_divider(rdev); kv_dpm_set_power_state()
1905 kv_calculate_nbps_level_settings(rdev); kv_dpm_set_power_state()
1906 kv_calculate_dpm_settings(rdev); kv_dpm_set_power_state()
1907 kv_freeze_sclk_dpm(rdev, true); kv_dpm_set_power_state()
1908 kv_upload_dpm_settings(rdev); kv_dpm_set_power_state()
1909 kv_program_nbps_index_settings(rdev, new_ps); kv_dpm_set_power_state()
1910 kv_freeze_sclk_dpm(rdev, false); kv_dpm_set_power_state()
1911 kv_set_enabled_levels(rdev); kv_dpm_set_power_state()
1912 ret = kv_update_vce_dpm(rdev, new_ps, old_ps); kv_dpm_set_power_state()
1917 kv_update_acp_boot_level(rdev); kv_dpm_set_power_state()
1918 kv_update_sclk_t(rdev); kv_dpm_set_power_state()
1919 kv_enable_nb_dpm(rdev, true); kv_dpm_set_power_state()
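
In the non-Kabini/Mullins branch of kv_dpm_set_power_state() above (source lines 1907-1910), the DPM table upload is bracketed by kv_freeze_sclk_dpm(rdev, true/false), presumably so the SMU never samples a half-written level table. The shape of that critical section, sketched with stub functions:

static void freeze_sclk_dpm(int freeze) { (void)freeze; /* SMU message */ }
static void upload_dpm_settings(void)   { /* copy levels to SMU SRAM */ }

static void swap_dpm_tables(void)
{
        freeze_sclk_dpm(1);     /* stop the SMU from re-reading levels */
        upload_dpm_settings();  /* table cannot be sampled mid-write */
        freeze_sclk_dpm(0);     /* resume dynamic state switching */
}
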
1926 void kv_dpm_post_set_power_state(struct radeon_device *rdev) kv_dpm_post_set_power_state() argument
1928 struct kv_power_info *pi = kv_get_pi(rdev); kv_dpm_post_set_power_state()
1931 kv_update_current_ps(rdev, new_ps); kv_dpm_post_set_power_state()
1934 void kv_dpm_setup_asic(struct radeon_device *rdev) kv_dpm_setup_asic() argument
1936 sumo_take_smu_control(rdev, true); kv_dpm_setup_asic()
1937 kv_init_powergate_state(rdev); kv_dpm_setup_asic()
1938 kv_init_sclk_t(rdev); kv_dpm_setup_asic()
1942 void kv_dpm_reset_asic(struct radeon_device *rdev)
1944 struct kv_power_info *pi = kv_get_pi(rdev);
1946 if (rdev->family == CHIP_KABINI || rdev->family == CHIP_MULLINS) {
1947 kv_force_lowest_valid(rdev);
1948 kv_init_graphics_levels(rdev);
1949 kv_program_bootup_state(rdev);
1950 kv_upload_dpm_settings(rdev);
1951 kv_force_lowest_valid(rdev);
1952 kv_unforce_levels(rdev);
1954 kv_init_graphics_levels(rdev);
1955 kv_program_bootup_state(rdev);
1956 kv_freeze_sclk_dpm(rdev, true);
1957 kv_upload_dpm_settings(rdev);
1958 kv_freeze_sclk_dpm(rdev, false);
1959 kv_set_enabled_level(rdev, pi->graphics_boot_level);
1966 static void kv_construct_max_power_limits_table(struct radeon_device *rdev, kv_construct_max_power_limits_table() argument
1969 struct kv_power_info *pi = kv_get_pi(rdev); kv_construct_max_power_limits_table()
1976 kv_convert_2bit_index_to_voltage(rdev, kv_construct_max_power_limits_table()
1983 static void kv_patch_voltage_values(struct radeon_device *rdev) kv_patch_voltage_values() argument
1987 &rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table; kv_patch_voltage_values()
1989 &rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table; kv_patch_voltage_values()
1991 &rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table; kv_patch_voltage_values()
1993 &rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table; kv_patch_voltage_values()
1998 kv_convert_8bit_index_to_voltage(rdev, kv_patch_voltage_values()
2005 kv_convert_8bit_index_to_voltage(rdev, kv_patch_voltage_values()
2012 kv_convert_8bit_index_to_voltage(rdev, kv_patch_voltage_values()
2019 kv_convert_8bit_index_to_voltage(rdev, kv_patch_voltage_values()
2025 static void kv_construct_boot_state(struct radeon_device *rdev) kv_construct_boot_state() argument
2027 struct kv_power_info *pi = kv_get_pi(rdev); kv_construct_boot_state()
2039 static int kv_force_dpm_highest(struct radeon_device *rdev) kv_force_dpm_highest() argument
2044 ret = kv_dpm_get_enable_mask(rdev, &enable_mask); kv_force_dpm_highest()
2053 if (rdev->family == CHIP_KABINI || rdev->family == CHIP_MULLINS) kv_force_dpm_highest()
2054 return kv_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_DPM_ForceState, i); kv_force_dpm_highest()
2056 return kv_set_enabled_level(rdev, i); kv_force_dpm_highest()
2059 static int kv_force_dpm_lowest(struct radeon_device *rdev) kv_force_dpm_lowest() argument
2064 ret = kv_dpm_get_enable_mask(rdev, &enable_mask); kv_force_dpm_lowest()
2073 if (rdev->family == CHIP_KABINI || rdev->family == CHIP_MULLINS) kv_force_dpm_lowest()
2074 return kv_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_DPM_ForceState, i); kv_force_dpm_lowest()
2076 return kv_set_enabled_level(rdev, i); kv_force_dpm_lowest()
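
kv_force_dpm_highest() and kv_force_dpm_lowest() above both fetch the SMU's enabled-level bitmask via kv_dpm_get_enable_mask() and then force the highest or lowest set bit. The scan between those two steps is elided in this listing; a plausible reconstruction of it as a plain bit search, bounds simplified to 32 levels:

#include <stdint.h>

static int highest_enabled_level(uint32_t enable_mask)
{
        for (int i = 31; i >= 0; i--)
                if (enable_mask & (1u << i))
                        return i;       /* index handed to DPM_ForceState */
        return -1;                      /* no level enabled */
}

static int lowest_enabled_level(uint32_t enable_mask)
{
        for (int i = 0; i < 32; i++)
                if (enable_mask & (1u << i))
                        return i;
        return -1;
}
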
2079 static u8 kv_get_sleep_divider_id_from_clock(struct radeon_device *rdev, kv_get_sleep_divider_id_from_clock() argument
2082 struct kv_power_info *pi = kv_get_pi(rdev); kv_get_sleep_divider_id_from_clock()
2103 static int kv_get_high_voltage_limit(struct radeon_device *rdev, int *limit) kv_get_high_voltage_limit() argument
2105 struct kv_power_info *pi = kv_get_pi(rdev); kv_get_high_voltage_limit()
2107 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk; kv_get_high_voltage_limit()
2113 (kv_convert_8bit_index_to_voltage(rdev, table->entries[i].v) <= kv_get_high_voltage_limit()
2125 (kv_convert_2bit_index_to_voltage(rdev, table->entries[i].vid_2bit) <= kv_get_high_voltage_limit()
2137 static void kv_apply_state_adjust_rules(struct radeon_device *rdev, kv_apply_state_adjust_rules() argument
2142 struct kv_power_info *pi = kv_get_pi(rdev); kv_apply_state_adjust_rules()
2148 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk; kv_apply_state_adjust_rules()
2151 &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac; kv_apply_state_adjust_rules()
2154 new_rps->evclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].evclk; kv_apply_state_adjust_rules()
2155 new_rps->ecclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].ecclk; kv_apply_state_adjust_rules()
2181 if (sclk < rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].sclk) kv_apply_state_adjust_rules()
2182 sclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].sclk; kv_apply_state_adjust_rules()
2196 kv_convert_8bit_index_to_voltage(rdev, ps->levels[i].vddc_index))) { kv_apply_state_adjust_rules()
2197 kv_get_high_voltage_limit(rdev, &limit); kv_apply_state_adjust_rules()
2208 kv_convert_8bit_index_to_voltage(rdev, ps->levels[i].vddc_index))) { kv_apply_state_adjust_rules()
2209 kv_get_high_voltage_limit(rdev, &limit); kv_apply_state_adjust_rules()
2230 if (rdev->family == CHIP_KABINI || rdev->family == CHIP_MULLINS) { kv_apply_state_adjust_rules()
2243 pi->video_start || (rdev->pm.dpm.new_active_crtc_count >= 3) || kv_apply_state_adjust_rules()
2253 static void kv_dpm_power_level_enabled_for_throttle(struct radeon_device *rdev, kv_dpm_power_level_enabled_for_throttle() argument
2256 struct kv_power_info *pi = kv_get_pi(rdev); kv_dpm_power_level_enabled_for_throttle()
2261 static int kv_calculate_ds_divider(struct radeon_device *rdev) kv_calculate_ds_divider() argument
2263 struct kv_power_info *pi = kv_get_pi(rdev); kv_calculate_ds_divider()
2272 kv_get_sleep_divider_id_from_clock(rdev, kv_calculate_ds_divider()
2279 static int kv_calculate_nbps_level_settings(struct radeon_device *rdev) kv_calculate_nbps_level_settings() argument
2281 struct kv_power_info *pi = kv_get_pi(rdev); kv_calculate_nbps_level_settings()
2285 &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac; kv_calculate_nbps_level_settings()
2291 if (rdev->family == CHIP_KABINI || rdev->family == CHIP_MULLINS) { kv_calculate_nbps_level_settings()
2302 (rdev->pm.dpm.new_active_crtc_count >= 3) || pi->video_start); kv_calculate_nbps_level_settings()
2333 static int kv_calculate_dpm_settings(struct radeon_device *rdev) kv_calculate_dpm_settings() argument
2335 struct kv_power_info *pi = kv_get_pi(rdev); kv_calculate_dpm_settings()
2347 static void kv_init_graphics_levels(struct radeon_device *rdev) kv_init_graphics_levels() argument
2349 struct kv_power_info *pi = kv_get_pi(rdev); kv_init_graphics_levels()
2352 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk; kv_init_graphics_levels()
2361 kv_convert_8bit_index_to_voltage(rdev, table->entries[i].v))) kv_init_graphics_levels()
2364 kv_set_divider_value(rdev, i, table->entries[i].clk); kv_init_graphics_levels()
2365 vid_2bit = kv_convert_vid7_to_vid2(rdev, kv_init_graphics_levels()
2368 kv_set_vid(rdev, i, vid_2bit); kv_init_graphics_levels()
2369 kv_set_at(rdev, i, pi->at[i]); kv_init_graphics_levels()
2370 kv_dpm_power_level_enabled_for_throttle(rdev, i, true); kv_init_graphics_levels()
2381 kv_convert_2bit_index_to_voltage(rdev, table->entries[i].vid_2bit)) kv_init_graphics_levels()
2384 kv_set_divider_value(rdev, i, table->entries[i].sclk_frequency); kv_init_graphics_levels()
2385 kv_set_vid(rdev, i, table->entries[i].vid_2bit); kv_init_graphics_levels()
2386 kv_set_at(rdev, i, pi->at[i]); kv_init_graphics_levels()
2387 kv_dpm_power_level_enabled_for_throttle(rdev, i, true); kv_init_graphics_levels()
2393 kv_dpm_power_level_enable(rdev, i, false); kv_init_graphics_levels()
2396 static void kv_enable_new_levels(struct radeon_device *rdev) kv_enable_new_levels() argument
2398 struct kv_power_info *pi = kv_get_pi(rdev); kv_enable_new_levels()
2403 kv_dpm_power_level_enable(rdev, i, true); kv_enable_new_levels()
2407 static int kv_set_enabled_level(struct radeon_device *rdev, u32 level) kv_set_enabled_level() argument
2411 return kv_send_msg_to_smc_with_parameter(rdev, kv_set_enabled_level()
2416 static int kv_set_enabled_levels(struct radeon_device *rdev) kv_set_enabled_levels() argument
2418 struct kv_power_info *pi = kv_get_pi(rdev); kv_set_enabled_levels()
2424 return kv_send_msg_to_smc_with_parameter(rdev, kv_set_enabled_levels()
2429 static void kv_program_nbps_index_settings(struct radeon_device *rdev, kv_program_nbps_index_settings() argument
2433 struct kv_power_info *pi = kv_get_pi(rdev); kv_program_nbps_index_settings()
2436 if (rdev->family == CHIP_KABINI || rdev->family == CHIP_MULLINS) kv_program_nbps_index_settings()
2451 static int kv_set_thermal_temperature_range(struct radeon_device *rdev, kv_set_thermal_temperature_range() argument
2473 rdev->pm.dpm.thermal.min_temp = low_temp; kv_set_thermal_temperature_range()
2474 rdev->pm.dpm.thermal.max_temp = high_temp; kv_set_thermal_temperature_range()
2488 static int kv_parse_sys_info_table(struct radeon_device *rdev) kv_parse_sys_info_table() argument
2490 struct kv_power_info *pi = kv_get_pi(rdev); kv_parse_sys_info_table()
2491 struct radeon_mode_info *mode_info = &rdev->mode_info; kv_parse_sys_info_table()
2538 sumo_construct_sclk_voltage_mapping_table(rdev, kv_parse_sys_info_table()
2542 sumo_construct_vid_mapping_table(rdev, kv_parse_sys_info_table()
2546 kv_construct_max_power_limits_table(rdev, kv_parse_sys_info_table()
2547 &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac); kv_parse_sys_info_table()
2573 static void kv_patch_boot_state(struct radeon_device *rdev, kv_patch_boot_state() argument
2576 struct kv_power_info *pi = kv_get_pi(rdev); kv_patch_boot_state()
2582 static void kv_parse_pplib_non_clock_info(struct radeon_device *rdev, kv_parse_pplib_non_clock_info() argument
2602 rdev->pm.dpm.boot_ps = rps; kv_parse_pplib_non_clock_info()
2603 kv_patch_boot_state(rdev, ps); kv_parse_pplib_non_clock_info()
2606 rdev->pm.dpm.uvd_ps = rps; kv_parse_pplib_non_clock_info()
2609 static void kv_parse_pplib_clock_info(struct radeon_device *rdev, kv_parse_pplib_clock_info() argument
2613 struct kv_power_info *pi = kv_get_pi(rdev); kv_parse_pplib_clock_info()
2631 static int kv_parse_power_table(struct radeon_device *rdev) kv_parse_power_table() argument
2633 struct radeon_mode_info *mode_info = &rdev->mode_info; kv_parse_power_table()
2663 rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) * kv_parse_power_table()
2665 if (!rdev->pm.dpm.ps) kv_parse_power_table()
2674 if (!rdev->pm.power_state[i].clock_info) kv_parse_power_table()
2678 kfree(rdev->pm.dpm.ps); kv_parse_power_table()
2681 rdev->pm.dpm.ps[i].ps_priv = ps; kv_parse_power_table()
2693 kv_parse_pplib_clock_info(rdev, kv_parse_power_table()
2694 &rdev->pm.dpm.ps[i], k, kv_parse_power_table()
2698 kv_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], kv_parse_power_table()
2703 rdev->pm.dpm.num_ps = state_array->ucNumEntries; kv_parse_power_table()
2708 clock_array_index = rdev->pm.dpm.vce_states[i].clk_idx; kv_parse_power_table()
2713 rdev->pm.dpm.vce_states[i].sclk = sclk; kv_parse_power_table()
2714 rdev->pm.dpm.vce_states[i].mclk = 0; kv_parse_power_table()
2720 int kv_dpm_init(struct radeon_device *rdev) kv_dpm_init() argument
2728 rdev->pm.dpm.priv = pi; kv_dpm_init()
2730 ret = r600_get_platform_caps(rdev); kv_dpm_init()
2734 ret = r600_parse_extended_power_table(rdev); kv_dpm_init()
2744 if (rdev->pdev->subsystem_vendor == 0x1849) kv_dpm_init()
2764 if (rdev->family == CHIP_KABINI || rdev->family == CHIP_MULLINS) kv_dpm_init()
2783 ret = kv_parse_sys_info_table(rdev); kv_dpm_init()
2787 kv_patch_voltage_values(rdev); kv_dpm_init()
2788 kv_construct_boot_state(rdev); kv_dpm_init()
2790 ret = kv_parse_power_table(rdev); kv_dpm_init()
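
kv_dpm_init() above allocates the driver's private state and parks it in rdev->pm.dpm.priv, which kv_get_pi() (source line 251) casts back on every call. The pattern in miniature, with stand-in types and userspace allocation in place of kzalloc():

#include <stdlib.h>

struct kv_power_info { int dummy_state; };      /* stand-in fields */
struct dpm { void *priv; };

static struct kv_power_info *kv_get_pi(struct dpm *dpm)
{
        return (struct kv_power_info *)dpm->priv;  /* cast back */
}

static int kv_dpm_init_sketch(struct dpm *dpm)
{
        struct kv_power_info *pi = calloc(1, sizeof(*pi));
        if (!pi)
                return -1;              /* -ENOMEM in the kernel */
        dpm->priv = pi;                 /* later retrieved via kv_get_pi() */
        return 0;
}
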
2799 void kv_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev, kv_dpm_debugfs_print_current_performance_level() argument
2802 struct kv_power_info *pi = kv_get_pi(rdev); kv_dpm_debugfs_print_current_performance_level()
2815 vddc = kv_convert_8bit_index_to_voltage(rdev, (u16)tmp); kv_dpm_debugfs_print_current_performance_level()
2823 u32 kv_dpm_get_current_sclk(struct radeon_device *rdev) kv_dpm_get_current_sclk() argument
2825 struct kv_power_info *pi = kv_get_pi(rdev); kv_dpm_get_current_sclk()
2839 u32 kv_dpm_get_current_mclk(struct radeon_device *rdev) kv_dpm_get_current_mclk() argument
2841 struct kv_power_info *pi = kv_get_pi(rdev); kv_dpm_get_current_mclk()
2846 void kv_dpm_print_power_state(struct radeon_device *rdev, kv_dpm_print_power_state() argument
2859 kv_convert_8bit_index_to_voltage(rdev, pl->vddc_index)); kv_dpm_print_power_state()
2861 r600_dpm_print_ps_status(rdev, rps); kv_dpm_print_power_state()
2864 void kv_dpm_fini(struct radeon_device *rdev) kv_dpm_fini() argument
2868 for (i = 0; i < rdev->pm.dpm.num_ps; i++) { kv_dpm_fini()
2869 kfree(rdev->pm.dpm.ps[i].ps_priv); kv_dpm_fini()
2871 kfree(rdev->pm.dpm.ps); kv_dpm_fini()
2872 kfree(rdev->pm.dpm.priv); kv_dpm_fini()
2873 r600_free_extended_power_table(rdev); kv_dpm_fini()
2876 void kv_dpm_display_configuration_changed(struct radeon_device *rdev) kv_dpm_display_configuration_changed() argument
2881 u32 kv_dpm_get_sclk(struct radeon_device *rdev, bool low) kv_dpm_get_sclk() argument
2883 struct kv_power_info *pi = kv_get_pi(rdev); kv_dpm_get_sclk()
2892 u32 kv_dpm_get_mclk(struct radeon_device *rdev, bool low) kv_dpm_get_mclk() argument
2894 struct kv_power_info *pi = kv_get_pi(rdev); kv_dpm_get_mclk()
rv770.c
42 static void rv770_gpu_init(struct radeon_device *rdev);
43 void rv770_fini(struct radeon_device *rdev);
44 static void rv770_pcie_gen2_enable(struct radeon_device *rdev);
45 int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
47 int rv770_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk) rv770_set_uvd_clocks() argument
53 if (rdev->family == CHIP_RV740) rv770_set_uvd_clocks()
54 return evergreen_set_uvd_clocks(rdev, vclk, dclk); rv770_set_uvd_clocks()
67 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 50000, 160000, rv770_set_uvd_clocks()
87 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); rv770_set_uvd_clocks()
118 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); rv770_set_uvd_clocks()
716 static void rv770_init_golden_registers(struct radeon_device *rdev) rv770_init_golden_registers() argument
718 switch (rdev->family) { rv770_init_golden_registers()
720 radeon_program_register_sequence(rdev, rv770_init_golden_registers()
723 radeon_program_register_sequence(rdev, rv770_init_golden_registers()
726 if (rdev->pdev->device == 0x994e) rv770_init_golden_registers()
727 radeon_program_register_sequence(rdev, rv770_init_golden_registers()
731 radeon_program_register_sequence(rdev, rv770_init_golden_registers()
734 radeon_program_register_sequence(rdev, rv770_init_golden_registers()
739 radeon_program_register_sequence(rdev, rv770_init_golden_registers()
742 radeon_program_register_sequence(rdev, rv770_init_golden_registers()
745 radeon_program_register_sequence(rdev, rv770_init_golden_registers()
748 radeon_program_register_sequence(rdev, rv770_init_golden_registers()
753 radeon_program_register_sequence(rdev, rv770_init_golden_registers()
756 radeon_program_register_sequence(rdev, rv770_init_golden_registers()
759 radeon_program_register_sequence(rdev, rv770_init_golden_registers()
762 radeon_program_register_sequence(rdev, rv770_init_golden_registers()
767 radeon_program_register_sequence(rdev, rv770_init_golden_registers()
770 radeon_program_register_sequence(rdev, rv770_init_golden_registers()
785 * @rdev: radeon_device pointer
790 u32 rv770_get_xclk(struct radeon_device *rdev) rv770_get_xclk() argument
792 u32 reference_clock = rdev->clock.spll.reference_freq; rv770_get_xclk()
804 void rv770_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base) rv770_page_flip() argument
806 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id]; rv770_page_flip()
828 for (i = 0; i < rdev->usec_timeout; i++) { rv770_page_flip()
840 bool rv770_page_flip_pending(struct radeon_device *rdev, int crtc_id) rv770_page_flip_pending() argument
842 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id]; rv770_page_flip_pending()
850 int rv770_get_temp(struct radeon_device *rdev) rv770_get_temp() argument
869 void rv770_pm_misc(struct radeon_device *rdev) rv770_pm_misc() argument
871 int req_ps_idx = rdev->pm.requested_power_state_index; rv770_pm_misc()
872 int req_cm_idx = rdev->pm.requested_clock_mode_index; rv770_pm_misc()
873 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx]; rv770_pm_misc()
880 if (voltage->voltage != rdev->pm.current_vddc) { rv770_pm_misc()
881 radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC); rv770_pm_misc()
882 rdev->pm.current_vddc = voltage->voltage; rv770_pm_misc()
891 static int rv770_pcie_gart_enable(struct radeon_device *rdev) rv770_pcie_gart_enable() argument
896 if (rdev->gart.robj == NULL) { rv770_pcie_gart_enable()
897 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n"); rv770_pcie_gart_enable()
900 r = radeon_gart_table_vram_pin(rdev); rv770_pcie_gart_enable()
917 if (rdev->family == CHIP_RV740) rv770_pcie_gart_enable()
923 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12); rv770_pcie_gart_enable()
924 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12); rv770_pcie_gart_enable()
925 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); rv770_pcie_gart_enable()
929 (u32)(rdev->dummy_page.addr >> 12)); rv770_pcie_gart_enable()
933 r600_pcie_gart_tlb_flush(rdev); rv770_pcie_gart_enable()
935 (unsigned)(rdev->mc.gtt_size >> 20), rv770_pcie_gart_enable()
936 (unsigned long long)rdev->gart.table_addr); rv770_pcie_gart_enable()
937 rdev->gart.ready = true; rv770_pcie_gart_enable()
941 static void rv770_pcie_gart_disable(struct radeon_device *rdev) rv770_pcie_gart_disable() argument
964 radeon_gart_table_vram_unpin(rdev); rv770_pcie_gart_disable()
967 static void rv770_pcie_gart_fini(struct radeon_device *rdev) rv770_pcie_gart_fini() argument
969 radeon_gart_fini(rdev); rv770_pcie_gart_fini()
970 rv770_pcie_gart_disable(rdev); rv770_pcie_gart_fini()
971 radeon_gart_table_vram_free(rdev); rv770_pcie_gart_fini()
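
rv770_pcie_gart_enable() above programs the GART window and page-table base as 4 KiB page numbers (byte address >> 12, source lines 923-929), with faults redirected to a dummy page. A standalone sketch of just the address math; the struct stands in for the VM_CONTEXT0_* register writes:

#include <stdint.h>

struct gart_regs { uint32_t start_pfn, end_pfn, table_pfn, fault_pfn; };

static void program_gart(struct gart_regs *r, uint64_t gtt_start,
                         uint64_t gtt_end, uint64_t table_addr,
                         uint64_t dummy_page_addr)
{
        r->start_pfn = (uint32_t)(gtt_start >> 12);       /* first GTT page */
        r->end_pfn   = (uint32_t)(gtt_end >> 12);         /* last GTT page */
        r->table_pfn = (uint32_t)(table_addr >> 12);      /* table base */
        r->fault_pfn = (uint32_t)(dummy_page_addr >> 12); /* fault target */
}
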
975 static void rv770_agp_enable(struct radeon_device *rdev) rv770_agp_enable() argument
1002 static void rv770_mc_program(struct radeon_device *rdev) rv770_mc_program() argument
1021 rv515_mc_stop(rdev, &save); rv770_mc_program()
1022 if (r600_mc_wait_for_idle(rdev)) { rv770_mc_program()
1023 dev_warn(rdev->dev, "Wait for MC idle timedout !\n"); rv770_mc_program()
1028 if (rdev->flags & RADEON_IS_AGP) { rv770_mc_program()
1029 if (rdev->mc.vram_start < rdev->mc.gtt_start) { rv770_mc_program()
1032 rdev->mc.vram_start >> 12); rv770_mc_program()
1034 rdev->mc.gtt_end >> 12); rv770_mc_program()
1038 rdev->mc.gtt_start >> 12); rv770_mc_program()
1040 rdev->mc.vram_end >> 12); rv770_mc_program()
1044 rdev->mc.vram_start >> 12); rv770_mc_program()
1046 rdev->mc.vram_end >> 12); rv770_mc_program()
1048 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12); rv770_mc_program()
1049 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16; rv770_mc_program()
1050 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF); rv770_mc_program()
1052 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8)); rv770_mc_program()
1055 if (rdev->flags & RADEON_IS_AGP) { rv770_mc_program()
1056 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16); rv770_mc_program()
1057 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16); rv770_mc_program()
1058 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22); rv770_mc_program()
1064 if (r600_mc_wait_for_idle(rdev)) { rv770_mc_program()
1065 dev_warn(rdev->dev, "Wait for MC idle timedout !\n"); rv770_mc_program()
1067 rv515_mc_resume(rdev, &save); rv770_mc_program()
1070 rv515_vga_render_disable(rdev); rv770_mc_program()
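A small runnable model of the FB-location packing in rv770_mc_program() above: the top and bottom of VRAM are encoded in 16 MiB units (the ">> 24") in the high and low 16-bit halves of a single register. The aperture below is a made-up example.

#include <stdio.h>
#include <stdint.h>

static uint32_t pack_fb_location(uint64_t vram_start, uint64_t vram_end)
{
    /* top of VRAM in 16 MiB units -> high half */
    uint32_t tmp = ((uint32_t)(vram_end >> 24) & 0xFFFF) << 16;
    /* bottom of VRAM in 16 MiB units -> low half */
    tmp |= (uint32_t)(vram_start >> 24) & 0xFFFF;
    return tmp;
}

int main(void)
{
    /* hypothetical 512 MiB aperture starting at 0 */
    printf("0x%08x\n", pack_fb_location(0x00000000ULL, 0x1fffffffULL));
    return 0;
}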
1077 void r700_cp_stop(struct radeon_device *rdev) r700_cp_stop() argument
1079 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) r700_cp_stop()
1080 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size); r700_cp_stop()
1083 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; r700_cp_stop()
1086 static int rv770_cp_load_microcode(struct radeon_device *rdev) rv770_cp_load_microcode() argument
1091 if (!rdev->me_fw || !rdev->pfp_fw) rv770_cp_load_microcode()
1094 r700_cp_stop(rdev); rv770_cp_load_microcode()
1107 fw_data = (const __be32 *)rdev->pfp_fw->data; rv770_cp_load_microcode()
1113 fw_data = (const __be32 *)rdev->me_fw->data; rv770_cp_load_microcode()
1124 void r700_cp_fini(struct radeon_device *rdev) r700_cp_fini() argument
1126 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; r700_cp_fini()
1127 r700_cp_stop(rdev); r700_cp_fini()
1128 radeon_ring_fini(rdev, ring); r700_cp_fini()
1129 radeon_scratch_free(rdev, ring->rptr_save_reg); r700_cp_fini()
1132 void rv770_set_clk_bypass_mode(struct radeon_device *rdev) rv770_set_clk_bypass_mode() argument
1136 if (rdev->flags & RADEON_IS_IGP) rv770_set_clk_bypass_mode()
1144 for (i = 0; i < rdev->usec_timeout; i++) { rv770_set_clk_bypass_mode()
1154 if ((rdev->family == CHIP_RV710) || (rdev->family == CHIP_RV730)) rv770_set_clk_bypass_mode()
1164 static void rv770_gpu_init(struct radeon_device *rdev) rv770_gpu_init() argument
1188 rdev->config.rv770.tiling_group_size = 256; rv770_gpu_init()
1189 switch (rdev->family) { rv770_gpu_init()
1191 rdev->config.rv770.max_pipes = 4; rv770_gpu_init()
1192 rdev->config.rv770.max_tile_pipes = 8; rv770_gpu_init()
1193 rdev->config.rv770.max_simds = 10; rv770_gpu_init()
1194 rdev->config.rv770.max_backends = 4; rv770_gpu_init()
1195 rdev->config.rv770.max_gprs = 256; rv770_gpu_init()
1196 rdev->config.rv770.max_threads = 248; rv770_gpu_init()
1197 rdev->config.rv770.max_stack_entries = 512; rv770_gpu_init()
1198 rdev->config.rv770.max_hw_contexts = 8; rv770_gpu_init()
1199 rdev->config.rv770.max_gs_threads = 16 * 2; rv770_gpu_init()
1200 rdev->config.rv770.sx_max_export_size = 128; rv770_gpu_init()
1201 rdev->config.rv770.sx_max_export_pos_size = 16; rv770_gpu_init()
1202 rdev->config.rv770.sx_max_export_smx_size = 112; rv770_gpu_init()
1203 rdev->config.rv770.sq_num_cf_insts = 2; rv770_gpu_init()
1205 rdev->config.rv770.sx_num_of_sets = 7; rv770_gpu_init()
1206 rdev->config.rv770.sc_prim_fifo_size = 0xF9; rv770_gpu_init()
1207 rdev->config.rv770.sc_hiz_tile_fifo_size = 0x30; rv770_gpu_init()
1208 rdev->config.rv770.sc_earlyz_tile_fifo_fize = 0x130; rv770_gpu_init()
1211 rdev->config.rv770.max_pipes = 2; rv770_gpu_init()
1212 rdev->config.rv770.max_tile_pipes = 4; rv770_gpu_init()
1213 rdev->config.rv770.max_simds = 8; rv770_gpu_init()
1214 rdev->config.rv770.max_backends = 2; rv770_gpu_init()
1215 rdev->config.rv770.max_gprs = 128; rv770_gpu_init()
1216 rdev->config.rv770.max_threads = 248; rv770_gpu_init()
1217 rdev->config.rv770.max_stack_entries = 256; rv770_gpu_init()
1218 rdev->config.rv770.max_hw_contexts = 8; rv770_gpu_init()
1219 rdev->config.rv770.max_gs_threads = 16 * 2; rv770_gpu_init()
1220 rdev->config.rv770.sx_max_export_size = 256; rv770_gpu_init()
1221 rdev->config.rv770.sx_max_export_pos_size = 32; rv770_gpu_init()
1222 rdev->config.rv770.sx_max_export_smx_size = 224; rv770_gpu_init()
1223 rdev->config.rv770.sq_num_cf_insts = 2; rv770_gpu_init()
1225 rdev->config.rv770.sx_num_of_sets = 7; rv770_gpu_init()
1226 rdev->config.rv770.sc_prim_fifo_size = 0xf9; rv770_gpu_init()
1227 rdev->config.rv770.sc_hiz_tile_fifo_size = 0x30; rv770_gpu_init()
1228 rdev->config.rv770.sc_earlyz_tile_fifo_fize = 0x130; rv770_gpu_init()
1229 if (rdev->config.rv770.sx_max_export_pos_size > 16) { rv770_gpu_init()
1230 rdev->config.rv770.sx_max_export_pos_size -= 16; rv770_gpu_init()
1231 rdev->config.rv770.sx_max_export_smx_size += 16; rv770_gpu_init()
1235 rdev->config.rv770.max_pipes = 2; rv770_gpu_init()
1236 rdev->config.rv770.max_tile_pipes = 2; rv770_gpu_init()
1237 rdev->config.rv770.max_simds = 2; rv770_gpu_init()
1238 rdev->config.rv770.max_backends = 1; rv770_gpu_init()
1239 rdev->config.rv770.max_gprs = 256; rv770_gpu_init()
1240 rdev->config.rv770.max_threads = 192; rv770_gpu_init()
1241 rdev->config.rv770.max_stack_entries = 256; rv770_gpu_init()
1242 rdev->config.rv770.max_hw_contexts = 4; rv770_gpu_init()
1243 rdev->config.rv770.max_gs_threads = 8 * 2; rv770_gpu_init()
1244 rdev->config.rv770.sx_max_export_size = 128; rv770_gpu_init()
1245 rdev->config.rv770.sx_max_export_pos_size = 16; rv770_gpu_init()
1246 rdev->config.rv770.sx_max_export_smx_size = 112; rv770_gpu_init()
1247 rdev->config.rv770.sq_num_cf_insts = 1; rv770_gpu_init()
1249 rdev->config.rv770.sx_num_of_sets = 7; rv770_gpu_init()
1250 rdev->config.rv770.sc_prim_fifo_size = 0x40; rv770_gpu_init()
1251 rdev->config.rv770.sc_hiz_tile_fifo_size = 0x30; rv770_gpu_init()
1252 rdev->config.rv770.sc_earlyz_tile_fifo_fize = 0x130; rv770_gpu_init()
1255 rdev->config.rv770.max_pipes = 4; rv770_gpu_init()
1256 rdev->config.rv770.max_tile_pipes = 4; rv770_gpu_init()
1257 rdev->config.rv770.max_simds = 8; rv770_gpu_init()
1258 rdev->config.rv770.max_backends = 4; rv770_gpu_init()
1259 rdev->config.rv770.max_gprs = 256; rv770_gpu_init()
1260 rdev->config.rv770.max_threads = 248; rv770_gpu_init()
1261 rdev->config.rv770.max_stack_entries = 512; rv770_gpu_init()
1262 rdev->config.rv770.max_hw_contexts = 8; rv770_gpu_init()
1263 rdev->config.rv770.max_gs_threads = 16 * 2; rv770_gpu_init()
1264 rdev->config.rv770.sx_max_export_size = 256; rv770_gpu_init()
1265 rdev->config.rv770.sx_max_export_pos_size = 32; rv770_gpu_init()
1266 rdev->config.rv770.sx_max_export_smx_size = 224; rv770_gpu_init()
1267 rdev->config.rv770.sq_num_cf_insts = 2; rv770_gpu_init()
1269 rdev->config.rv770.sx_num_of_sets = 7; rv770_gpu_init()
1270 rdev->config.rv770.sc_prim_fifo_size = 0x100; rv770_gpu_init()
1271 rdev->config.rv770.sc_hiz_tile_fifo_size = 0x30; rv770_gpu_init()
1272 rdev->config.rv770.sc_earlyz_tile_fifo_fize = 0x130; rv770_gpu_init()
1274 if (rdev->config.rv770.sx_max_export_pos_size > 16) { rv770_gpu_init()
1275 rdev->config.rv770.sx_max_export_pos_size -= 16; rv770_gpu_init()
1276 rdev->config.rv770.sx_max_export_smx_size += 16; rv770_gpu_init()
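The RV730/RV740 branches above rebalance the export buffers: whenever more than 16 position entries are configured, 16 entries are shifted from the position partition to the SMX partition. A tiny sketch of that rule, using the RV730 values from the listing:

#include <stdio.h>

int main(void)
{
    int pos = 32, smx = 224; /* RV730-style table values from above */

    if (pos > 16) {          /* move 16 entries from position to SMX */
        pos -= 16;
        smx += 16;
    }
    printf("pos=%d smx=%d\n", pos, smx); /* pos=16 smx=240 */
    return 0;
}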
1314 tmp = rdev->config.rv770.max_simds - rv770_gpu_init()
1316 rdev->config.rv770.active_simds = tmp; rv770_gpu_init()
1318 switch (rdev->config.rv770.max_tile_pipes) { rv770_gpu_init()
1333 rdev->config.rv770.tiling_npipes = rdev->config.rv770.max_tile_pipes; rv770_gpu_init()
1337 for (i = 0; i < rdev->config.rv770.max_backends; i++) rv770_gpu_init()
1341 for (i = 0; i < rdev->config.rv770.max_backends; i++) rv770_gpu_init()
1345 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.rv770.max_backends, rv770_gpu_init()
1348 rdev->config.rv770.backend_map = tmp; rv770_gpu_init()
1350 if (rdev->family == CHIP_RV770) rv770_gpu_init()
1358 rdev->config.rv770.tiling_nbanks = 4 << ((gb_tiling_config >> 4) & 0x3); rv770_gpu_init()
1371 rdev->config.rv770.tile_config = gb_tiling_config; rv770_gpu_init()
1378 if (rdev->family == CHIP_RV730) { rv770_gpu_init()
1409 smx_dc_ctl0 |= CACHE_DEPTH((rdev->config.rv770.sx_num_of_sets * 64) - 1); rv770_gpu_init()
1412 if (rdev->family != CHIP_RV740) rv770_gpu_init()
1418 if (rdev->family != CHIP_RV770) rv770_gpu_init()
1423 switch (rdev->family) { rv770_gpu_init()
1436 if (rdev->family != CHIP_RV770) { rv770_gpu_init()
1442 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.rv770.sx_max_export_size / 4) - 1) | rv770_gpu_init()
1443 POSITION_BUFFER_SIZE((rdev->config.rv770.sx_max_export_pos_size / 4) - 1) | rv770_gpu_init()
1444 SMX_BUFFER_SIZE((rdev->config.rv770.sx_max_export_smx_size / 4) - 1))); rv770_gpu_init()
1446 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.rv770.sc_prim_fifo_size) | rv770_gpu_init()
1447 SC_HIZ_TILE_FIFO_SIZE(rdev->config.rv770.sc_hiz_tile_fifo_size) | rv770_gpu_init()
1448 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.rv770.sc_earlyz_tile_fifo_fize))); rv770_gpu_init()
1458 sq_ms_fifo_sizes = (CACHE_FIFO_SIZE(16 * rdev->config.rv770.sq_num_cf_insts) | rv770_gpu_init()
1461 switch (rdev->family) { rv770_gpu_init()
1489 if (rdev->family == CHIP_RV710) rv770_gpu_init()
1495 WREG32(SQ_GPR_RESOURCE_MGMT_1, (NUM_PS_GPRS((rdev->config.rv770.max_gprs * 24)/64) | rv770_gpu_init()
1496 NUM_VS_GPRS((rdev->config.rv770.max_gprs * 24)/64) | rv770_gpu_init()
1497 NUM_CLAUSE_TEMP_GPRS(((rdev->config.rv770.max_gprs * 24)/64)/2))); rv770_gpu_init()
1499 WREG32(SQ_GPR_RESOURCE_MGMT_2, (NUM_GS_GPRS((rdev->config.rv770.max_gprs * 7)/64) | rv770_gpu_init()
1500 NUM_ES_GPRS((rdev->config.rv770.max_gprs * 7)/64))); rv770_gpu_init()
1502 sq_thread_resource_mgmt = (NUM_PS_THREADS((rdev->config.rv770.max_threads * 4)/8) | rv770_gpu_init()
1503 NUM_VS_THREADS((rdev->config.rv770.max_threads * 2)/8) | rv770_gpu_init()
1504 NUM_ES_THREADS((rdev->config.rv770.max_threads * 1)/8)); rv770_gpu_init()
1505 if (((rdev->config.rv770.max_threads * 1) / 8) > rdev->config.rv770.max_gs_threads) rv770_gpu_init()
1506 sq_thread_resource_mgmt |= NUM_GS_THREADS(rdev->config.rv770.max_gs_threads); rv770_gpu_init()
1508 sq_thread_resource_mgmt |= NUM_GS_THREADS((rdev->config.rv770.max_gs_threads * 1)/8); rv770_gpu_init()
1511 WREG32(SQ_STACK_RESOURCE_MGMT_1, (NUM_PS_STACK_ENTRIES((rdev->config.rv770.max_stack_entries * 1)/4) | rv770_gpu_init()
1512 NUM_VS_STACK_ENTRIES((rdev->config.rv770.max_stack_entries * 1)/4))); rv770_gpu_init()
1514 WREG32(SQ_STACK_RESOURCE_MGMT_2, (NUM_GS_STACK_ENTRIES((rdev->config.rv770.max_stack_entries * 1)/4) | rv770_gpu_init()
1515 NUM_ES_STACK_ENTRIES((rdev->config.rv770.max_stack_entries * 1)/4))); rv770_gpu_init()
1517 sq_dyn_gpr_size_simd_ab_0 = (SIMDA_RING0((rdev->config.rv770.max_gprs * 38)/64) | rv770_gpu_init()
1518 SIMDA_RING1((rdev->config.rv770.max_gprs * 38)/64) | rv770_gpu_init()
1519 SIMDB_RING0((rdev->config.rv770.max_gprs * 38)/64) | rv770_gpu_init()
1520 SIMDB_RING1((rdev->config.rv770.max_gprs * 38)/64)); rv770_gpu_init()
1534 if (rdev->family == CHIP_RV710) rv770_gpu_init()
1541 switch (rdev->family) { rv770_gpu_init()
1554 num_gs_verts_per_thread = rdev->config.rv770.max_pipes * 16; rv770_gpu_init()
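The SQ programming above splits the thread pool 4/8 PS, 2/8 VS and 1/8 ES, and clamps the GS share against max_gs_threads. A stand-alone sketch of that arithmetic, mirroring the branch exactly as it appears in the listing and using the RV770 numbers (248 threads, 32 GS threads):

#include <stdio.h>

int main(void)
{
    int max_threads = 248, max_gs_threads = 32;
    int ps = max_threads * 4 / 8;   /* 124 pixel-shader threads */
    int vs = max_threads * 2 / 8;   /*  62 vertex-shader threads */
    int es = max_threads * 1 / 8;   /*  31 export-shader threads */
    /* same branch shape as the listing: clamp, else take 1/8 of max_gs */
    int gs = (es > max_gs_threads) ? max_gs_threads : max_gs_threads / 8;

    printf("ps=%d vs=%d es=%d gs=%d\n", ps, vs, es, gs);
    return 0;
}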
1600 void r700_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc) r700_vram_gtt_location() argument
1606 dev_warn(rdev->dev, "limiting VRAM\n"); r700_vram_gtt_location()
1610 if (rdev->flags & RADEON_IS_AGP) { r700_vram_gtt_location()
1615 dev_warn(rdev->dev, "limiting VRAM\n"); r700_vram_gtt_location()
1622 dev_warn(rdev->dev, "limiting VRAM\n"); r700_vram_gtt_location()
1629 dev_info(rdev->dev, "VRAM: %lluM 0x%08llX - 0x%08llX (%lluM used)\n", r700_vram_gtt_location()
1633 radeon_vram_location(rdev, &rdev->mc, 0); r700_vram_gtt_location()
1634 rdev->mc.gtt_base_align = 0; r700_vram_gtt_location()
1635 radeon_gtt_location(rdev, mc); r700_vram_gtt_location()
1639 static int rv770_mc_init(struct radeon_device *rdev) rv770_mc_init() argument
1645 rdev->mc.vram_is_ddr = true; rv770_mc_init()
1670 rdev->mc.vram_width = numchan * chansize; rv770_mc_init()
1672 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0); rv770_mc_init()
1673 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0); rv770_mc_init()
1675 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE); rv770_mc_init()
1676 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE); rv770_mc_init()
1677 rdev->mc.visible_vram_size = rdev->mc.aper_size; rv770_mc_init()
1678 r700_vram_gtt_location(rdev, &rdev->mc); rv770_mc_init()
1679 radeon_update_bandwidth_info(rdev); rv770_mc_init()
1684 static int rv770_startup(struct radeon_device *rdev) rv770_startup() argument
1690 rv770_pcie_gen2_enable(rdev); rv770_startup()
1693 r = r600_vram_scratch_init(rdev); rv770_startup()
1697 rv770_mc_program(rdev); rv770_startup()
1699 if (rdev->flags & RADEON_IS_AGP) { rv770_startup()
1700 rv770_agp_enable(rdev); rv770_startup()
1702 r = rv770_pcie_gart_enable(rdev); rv770_startup()
1707 rv770_gpu_init(rdev); rv770_startup()
1710 r = radeon_wb_init(rdev); rv770_startup()
1714 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); rv770_startup()
1716 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); rv770_startup()
1720 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX); rv770_startup()
1722 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); rv770_startup()
1726 r = uvd_v2_2_resume(rdev); rv770_startup()
1728 r = radeon_fence_driver_start_ring(rdev, rv770_startup()
1731 dev_err(rdev->dev, "UVD fences init error (%d).\n", r); rv770_startup()
1735 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; rv770_startup()
1738 if (!rdev->irq.installed) { rv770_startup()
1739 r = radeon_irq_kms_init(rdev); rv770_startup()
1744 r = r600_irq_init(rdev); rv770_startup()
1747 radeon_irq_kms_fini(rdev); rv770_startup()
1750 r600_irq_set(rdev); rv770_startup()
1752 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; rv770_startup()
1753 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, rv770_startup()
1758 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; rv770_startup()
1759 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET, rv770_startup()
1764 r = rv770_cp_load_microcode(rdev); rv770_startup()
1767 r = r600_cp_resume(rdev); rv770_startup()
1771 r = r600_dma_resume(rdev); rv770_startup()
1775 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; rv770_startup()
1777 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, rv770_startup()
1780 r = uvd_v1_0_init(rdev); rv770_startup()
1786 r = radeon_ib_pool_init(rdev); rv770_startup()
1788 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); rv770_startup()
1792 r = radeon_audio_init(rdev); rv770_startup()
1801 int rv770_resume(struct radeon_device *rdev) rv770_resume() argument
1810 atom_asic_init(rdev->mode_info.atom_context); rv770_resume()
1813 rv770_init_golden_registers(rdev); rv770_resume()
1815 if (rdev->pm.pm_method == PM_METHOD_DPM) rv770_resume()
1816 radeon_pm_resume(rdev); rv770_resume()
1818 rdev->accel_working = true; rv770_resume()
1819 r = rv770_startup(rdev); rv770_resume()
1822 rdev->accel_working = false; rv770_resume()
1830 int rv770_suspend(struct radeon_device *rdev) rv770_suspend() argument
1832 radeon_pm_suspend(rdev); rv770_suspend()
1833 radeon_audio_fini(rdev); rv770_suspend()
1834 uvd_v1_0_fini(rdev); rv770_suspend()
1835 radeon_uvd_suspend(rdev); rv770_suspend()
1836 r700_cp_stop(rdev); rv770_suspend()
1837 r600_dma_stop(rdev); rv770_suspend()
1838 r600_irq_suspend(rdev); rv770_suspend()
1839 radeon_wb_disable(rdev); rv770_suspend()
1840 rv770_pcie_gart_disable(rdev); rv770_suspend()
1851 int rv770_init(struct radeon_device *rdev) rv770_init() argument
1856 if (!radeon_get_bios(rdev)) { rv770_init()
1857 if (ASIC_IS_AVIVO(rdev)) rv770_init()
1861 if (!rdev->is_atom_bios) { rv770_init()
1862 dev_err(rdev->dev, "Expecting atombios for R600 GPU\n"); rv770_init()
1865 r = radeon_atombios_init(rdev); rv770_init()
1869 if (!radeon_card_posted(rdev)) { rv770_init()
1870 if (!rdev->bios) { rv770_init()
1871 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n"); rv770_init()
1875 atom_asic_init(rdev->mode_info.atom_context); rv770_init()
1878 rv770_init_golden_registers(rdev); rv770_init()
1880 r600_scratch_init(rdev); rv770_init()
1882 radeon_surface_init(rdev); rv770_init()
1884 radeon_get_clock_info(rdev->ddev); rv770_init()
1886 r = radeon_fence_driver_init(rdev); rv770_init()
1890 if (rdev->flags & RADEON_IS_AGP) { rv770_init()
1891 r = radeon_agp_init(rdev); rv770_init()
1893 radeon_agp_disable(rdev); rv770_init()
1895 r = rv770_mc_init(rdev); rv770_init()
1899 r = radeon_bo_init(rdev); rv770_init()
1903 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) { rv770_init()
1904 r = r600_init_microcode(rdev); rv770_init()
1912 radeon_pm_init(rdev); rv770_init()
1914 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL; rv770_init()
1915 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024); rv770_init()
1917 rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL; rv770_init()
1918 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024); rv770_init()
1920 r = radeon_uvd_init(rdev); rv770_init()
1922 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL; rv770_init()
1923 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], rv770_init()
1927 rdev->ih.ring_obj = NULL; rv770_init()
1928 r600_ih_ring_init(rdev, 64 * 1024); rv770_init()
1930 r = r600_pcie_gart_init(rdev); rv770_init()
1934 rdev->accel_working = true; rv770_init()
1935 r = rv770_startup(rdev); rv770_init()
1937 dev_err(rdev->dev, "disabling GPU acceleration\n"); rv770_init()
1938 r700_cp_fini(rdev); rv770_init()
1939 r600_dma_fini(rdev); rv770_init()
1940 r600_irq_fini(rdev); rv770_init()
1941 radeon_wb_fini(rdev); rv770_init()
1942 radeon_ib_pool_fini(rdev); rv770_init()
1943 radeon_irq_kms_fini(rdev); rv770_init()
1944 rv770_pcie_gart_fini(rdev); rv770_init()
1945 rdev->accel_working = false; rv770_init()
1951 void rv770_fini(struct radeon_device *rdev) rv770_fini() argument
1953 radeon_pm_fini(rdev); rv770_fini()
1954 r700_cp_fini(rdev); rv770_fini()
1955 r600_dma_fini(rdev); rv770_fini()
1956 r600_irq_fini(rdev); rv770_fini()
1957 radeon_wb_fini(rdev); rv770_fini()
1958 radeon_ib_pool_fini(rdev); rv770_fini()
1959 radeon_irq_kms_fini(rdev); rv770_fini()
1960 uvd_v1_0_fini(rdev); rv770_fini()
1961 radeon_uvd_fini(rdev); rv770_fini()
1962 rv770_pcie_gart_fini(rdev); rv770_fini()
1963 r600_vram_scratch_fini(rdev); rv770_fini()
1964 radeon_gem_fini(rdev); rv770_fini()
1965 radeon_fence_driver_fini(rdev); rv770_fini()
1966 radeon_agp_fini(rdev); rv770_fini()
1967 radeon_bo_fini(rdev); rv770_fini()
1968 radeon_atombios_fini(rdev); rv770_fini()
1969 kfree(rdev->bios); rv770_fini()
1970 rdev->bios = NULL; rv770_fini()
1973 static void rv770_pcie_gen2_enable(struct radeon_device *rdev) rv770_pcie_gen2_enable() argument
1981 if (rdev->flags & RADEON_IS_IGP) rv770_pcie_gen2_enable()
1984 if (!(rdev->flags & RADEON_IS_PCIE)) rv770_pcie_gen2_enable()
1988 if (ASIC_IS_X2(rdev)) rv770_pcie_gen2_enable()
1991 if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) && rv770_pcie_gen2_enable()
1992 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT)) rv770_pcie_gen2_enable()
H A Dni.c42 u32 tn_smc_rreg(struct radeon_device *rdev, u32 reg) tn_smc_rreg() argument
47 spin_lock_irqsave(&rdev->smc_idx_lock, flags); tn_smc_rreg()
50 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); tn_smc_rreg()
54 void tn_smc_wreg(struct radeon_device *rdev, u32 reg, u32 v) tn_smc_wreg() argument
58 spin_lock_irqsave(&rdev->smc_idx_lock, flags); tn_smc_wreg()
61 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); tn_smc_wreg()
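tn_smc_rreg()/tn_smc_wreg() above show the classic indexed-register pattern: one shared index/data register pair, so the index write and the data access must happen under a single lock. A user-space model of the same pattern, with a pthread mutex standing in for the smc_idx_lock spinlock and an array standing in for the SMC address space (all names below are made up):

#include <stdint.h>
#include <pthread.h>

static uint32_t smc_index;           /* models TN_SMC_IND_INDEX_0 */
static uint32_t smc_space[256];      /* models the indirect SMC space */
static pthread_mutex_t smc_idx_lock = PTHREAD_MUTEX_INITIALIZER;

static uint32_t smc_rreg(uint32_t reg)
{
    uint32_t v;
    pthread_mutex_lock(&smc_idx_lock);
    smc_index = reg;                 /* write the index register */
    v = smc_space[smc_index];        /* then read the data register */
    pthread_mutex_unlock(&smc_idx_lock);
    return v;
}

static void smc_wreg(uint32_t reg, uint32_t v)
{
    pthread_mutex_lock(&smc_idx_lock);
    smc_index = reg;
    smc_space[smc_index] = v;
    pthread_mutex_unlock(&smc_idx_lock);
}

int main(void)
{
    smc_wreg(7, 0xdeadbeef);
    return smc_rreg(7) == 0xdeadbeefu ? 0 : 1;
}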
190 extern bool evergreen_is_display_hung(struct radeon_device *rdev);
191 extern void evergreen_print_gpu_status_regs(struct radeon_device *rdev);
192 extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
193 extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
194 extern int evergreen_mc_wait_for_idle(struct radeon_device *rdev);
195 extern void evergreen_mc_program(struct radeon_device *rdev);
196 extern void evergreen_irq_suspend(struct radeon_device *rdev);
197 extern int evergreen_mc_init(struct radeon_device *rdev);
198 extern void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev);
199 extern void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
200 extern void evergreen_program_aspm(struct radeon_device *rdev);
201 extern void sumo_rlc_fini(struct radeon_device *rdev);
202 extern int sumo_rlc_init(struct radeon_device *rdev);
203 extern void evergreen_gpu_pci_config_reset(struct radeon_device *rdev);
453 static void ni_init_golden_registers(struct radeon_device *rdev) ni_init_golden_registers() argument
455 switch (rdev->family) { ni_init_golden_registers()
457 radeon_program_register_sequence(rdev, ni_init_golden_registers()
460 radeon_program_register_sequence(rdev, ni_init_golden_registers()
465 if ((rdev->pdev->device == 0x9900) || ni_init_golden_registers()
466 (rdev->pdev->device == 0x9901) || ni_init_golden_registers()
467 (rdev->pdev->device == 0x9903) || ni_init_golden_registers()
468 (rdev->pdev->device == 0x9904) || ni_init_golden_registers()
469 (rdev->pdev->device == 0x9905) || ni_init_golden_registers()
470 (rdev->pdev->device == 0x9906) || ni_init_golden_registers()
471 (rdev->pdev->device == 0x9907) || ni_init_golden_registers()
472 (rdev->pdev->device == 0x9908) || ni_init_golden_registers()
473 (rdev->pdev->device == 0x9909) || ni_init_golden_registers()
474 (rdev->pdev->device == 0x990A) || ni_init_golden_registers()
475 (rdev->pdev->device == 0x990B) || ni_init_golden_registers()
476 (rdev->pdev->device == 0x990C) || ni_init_golden_registers()
477 (rdev->pdev->device == 0x990D) || ni_init_golden_registers()
478 (rdev->pdev->device == 0x990E) || ni_init_golden_registers()
479 (rdev->pdev->device == 0x990F) || ni_init_golden_registers()
480 (rdev->pdev->device == 0x9910) || ni_init_golden_registers()
481 (rdev->pdev->device == 0x9913) || ni_init_golden_registers()
482 (rdev->pdev->device == 0x9917) || ni_init_golden_registers()
483 (rdev->pdev->device == 0x9918)) { ni_init_golden_registers()
484 radeon_program_register_sequence(rdev, ni_init_golden_registers()
487 radeon_program_register_sequence(rdev, ni_init_golden_registers()
491 radeon_program_register_sequence(rdev, ni_init_golden_registers()
494 radeon_program_register_sequence(rdev, ni_init_golden_registers()
634 int ni_mc_load_microcode(struct radeon_device *rdev) ni_mc_load_microcode() argument
641 if (!rdev->mc_fw) ni_mc_load_microcode()
644 switch (rdev->family) { ni_mc_load_microcode()
687 fw_data = (const __be32 *)rdev->mc_fw->data; ni_mc_load_microcode()
697 for (i = 0; i < rdev->usec_timeout; i++) { ni_mc_load_microcode()
710 int ni_init_microcode(struct radeon_device *rdev) ni_init_microcode() argument
721 switch (rdev->family) { ni_init_microcode()
773 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev); ni_init_microcode()
776 if (rdev->pfp_fw->size != pfp_req_size) { ni_init_microcode()
779 rdev->pfp_fw->size, fw_name); ni_init_microcode()
785 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); ni_init_microcode()
788 if (rdev->me_fw->size != me_req_size) { ni_init_microcode()
791 rdev->me_fw->size, fw_name); ni_init_microcode()
796 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev); ni_init_microcode()
799 if (rdev->rlc_fw->size != rlc_req_size) { ni_init_microcode()
802 rdev->rlc_fw->size, fw_name); ni_init_microcode()
807 if (!(rdev->flags & RADEON_IS_IGP)) { ni_init_microcode()
809 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); ni_init_microcode()
812 if (rdev->mc_fw->size != mc_req_size) { ni_init_microcode()
815 rdev->mc_fw->size, fw_name); ni_init_microcode()
820 if ((rdev->family >= CHIP_BARTS) && (rdev->family <= CHIP_CAYMAN)) { ni_init_microcode()
822 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev); ni_init_microcode()
827 release_firmware(rdev->smc_fw); ni_init_microcode()
828 rdev->smc_fw = NULL; ni_init_microcode()
830 } else if (rdev->smc_fw->size != smc_req_size) { ni_init_microcode()
833 rdev->smc_fw->size, fw_name); ni_init_microcode()
844 release_firmware(rdev->pfp_fw); ni_init_microcode()
845 rdev->pfp_fw = NULL; ni_init_microcode()
846 release_firmware(rdev->me_fw); ni_init_microcode()
847 rdev->me_fw = NULL; ni_init_microcode()
848 release_firmware(rdev->rlc_fw); ni_init_microcode()
849 rdev->rlc_fw = NULL; ni_init_microcode()
850 release_firmware(rdev->mc_fw); ni_init_microcode()
851 rdev->mc_fw = NULL; ni_init_microcode()
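Every firmware image in ni_init_microcode() above follows the same fetch, size-check, release-on-failure sequence. A condensed kernel-style sketch of that pattern (the helper name is made up; request_firmware()/release_firmware() are the real APIs used in the listing):

#include <linux/firmware.h>
#include <linux/device.h>
#include <linux/errno.h>

static int fetch_fw(struct device *dev, const char *name, size_t want,
                    const struct firmware **fw)
{
    int err = request_firmware(fw, name, dev);
    if (err)
        return err;                 /* image missing or unreadable */
    if ((*fw)->size != want) {      /* wrong blob: drop it and fail */
        release_firmware(*fw);
        *fw = NULL;
        return -EINVAL;
    }
    return 0;
}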
859 * @rdev: radeon_device pointer
866 int cayman_get_allowed_info_register(struct radeon_device *rdev, cayman_get_allowed_info_register() argument
885 int tn_get_temp(struct radeon_device *rdev) tn_get_temp() argument
896 static void cayman_gpu_init(struct radeon_device *rdev) cayman_gpu_init() argument
909 switch (rdev->family) { cayman_gpu_init()
911 rdev->config.cayman.max_shader_engines = 2; cayman_gpu_init()
912 rdev->config.cayman.max_pipes_per_simd = 4; cayman_gpu_init()
913 rdev->config.cayman.max_tile_pipes = 8; cayman_gpu_init()
914 rdev->config.cayman.max_simds_per_se = 12; cayman_gpu_init()
915 rdev->config.cayman.max_backends_per_se = 4; cayman_gpu_init()
916 rdev->config.cayman.max_texture_channel_caches = 8; cayman_gpu_init()
917 rdev->config.cayman.max_gprs = 256; cayman_gpu_init()
918 rdev->config.cayman.max_threads = 256; cayman_gpu_init()
919 rdev->config.cayman.max_gs_threads = 32; cayman_gpu_init()
920 rdev->config.cayman.max_stack_entries = 512; cayman_gpu_init()
921 rdev->config.cayman.sx_num_of_sets = 8; cayman_gpu_init()
922 rdev->config.cayman.sx_max_export_size = 256; cayman_gpu_init()
923 rdev->config.cayman.sx_max_export_pos_size = 64; cayman_gpu_init()
924 rdev->config.cayman.sx_max_export_smx_size = 192; cayman_gpu_init()
925 rdev->config.cayman.max_hw_contexts = 8; cayman_gpu_init()
926 rdev->config.cayman.sq_num_cf_insts = 2; cayman_gpu_init()
928 rdev->config.cayman.sc_prim_fifo_size = 0x100; cayman_gpu_init()
929 rdev->config.cayman.sc_hiz_tile_fifo_size = 0x30; cayman_gpu_init()
930 rdev->config.cayman.sc_earlyz_tile_fifo_size = 0x130; cayman_gpu_init()
935 rdev->config.cayman.max_shader_engines = 1; cayman_gpu_init()
936 rdev->config.cayman.max_pipes_per_simd = 4; cayman_gpu_init()
937 rdev->config.cayman.max_tile_pipes = 2; cayman_gpu_init()
938 if ((rdev->pdev->device == 0x9900) || cayman_gpu_init()
939 (rdev->pdev->device == 0x9901) || cayman_gpu_init()
940 (rdev->pdev->device == 0x9905) || cayman_gpu_init()
941 (rdev->pdev->device == 0x9906) || cayman_gpu_init()
942 (rdev->pdev->device == 0x9907) || cayman_gpu_init()
943 (rdev->pdev->device == 0x9908) || cayman_gpu_init()
944 (rdev->pdev->device == 0x9909) || cayman_gpu_init()
945 (rdev->pdev->device == 0x990B) || cayman_gpu_init()
946 (rdev->pdev->device == 0x990C) || cayman_gpu_init()
947 (rdev->pdev->device == 0x990F) || cayman_gpu_init()
948 (rdev->pdev->device == 0x9910) || cayman_gpu_init()
949 (rdev->pdev->device == 0x9917) || cayman_gpu_init()
950 (rdev->pdev->device == 0x9999) || cayman_gpu_init()
951 (rdev->pdev->device == 0x999C)) { cayman_gpu_init()
952 rdev->config.cayman.max_simds_per_se = 6; cayman_gpu_init()
953 rdev->config.cayman.max_backends_per_se = 2; cayman_gpu_init()
954 rdev->config.cayman.max_hw_contexts = 8; cayman_gpu_init()
955 rdev->config.cayman.sx_max_export_size = 256; cayman_gpu_init()
956 rdev->config.cayman.sx_max_export_pos_size = 64; cayman_gpu_init()
957 rdev->config.cayman.sx_max_export_smx_size = 192; cayman_gpu_init()
958 } else if ((rdev->pdev->device == 0x9903) || cayman_gpu_init()
959 (rdev->pdev->device == 0x9904) || cayman_gpu_init()
960 (rdev->pdev->device == 0x990A) || cayman_gpu_init()
961 (rdev->pdev->device == 0x990D) || cayman_gpu_init()
962 (rdev->pdev->device == 0x990E) || cayman_gpu_init()
963 (rdev->pdev->device == 0x9913) || cayman_gpu_init()
964 (rdev->pdev->device == 0x9918) || cayman_gpu_init()
965 (rdev->pdev->device == 0x999D)) { cayman_gpu_init()
966 rdev->config.cayman.max_simds_per_se = 4; cayman_gpu_init()
967 rdev->config.cayman.max_backends_per_se = 2; cayman_gpu_init()
968 rdev->config.cayman.max_hw_contexts = 8; cayman_gpu_init()
969 rdev->config.cayman.sx_max_export_size = 256; cayman_gpu_init()
970 rdev->config.cayman.sx_max_export_pos_size = 64; cayman_gpu_init()
971 rdev->config.cayman.sx_max_export_smx_size = 192; cayman_gpu_init()
972 } else if ((rdev->pdev->device == 0x9919) || cayman_gpu_init()
973 (rdev->pdev->device == 0x9990) || cayman_gpu_init()
974 (rdev->pdev->device == 0x9991) || cayman_gpu_init()
975 (rdev->pdev->device == 0x9994) || cayman_gpu_init()
976 (rdev->pdev->device == 0x9995) || cayman_gpu_init()
977 (rdev->pdev->device == 0x9996) || cayman_gpu_init()
978 (rdev->pdev->device == 0x999A) || cayman_gpu_init()
979 (rdev->pdev->device == 0x99A0)) { cayman_gpu_init()
980 rdev->config.cayman.max_simds_per_se = 3; cayman_gpu_init()
981 rdev->config.cayman.max_backends_per_se = 1; cayman_gpu_init()
982 rdev->config.cayman.max_hw_contexts = 4; cayman_gpu_init()
983 rdev->config.cayman.sx_max_export_size = 128; cayman_gpu_init()
984 rdev->config.cayman.sx_max_export_pos_size = 32; cayman_gpu_init()
985 rdev->config.cayman.sx_max_export_smx_size = 96; cayman_gpu_init()
987 rdev->config.cayman.max_simds_per_se = 2; cayman_gpu_init()
988 rdev->config.cayman.max_backends_per_se = 1; cayman_gpu_init()
989 rdev->config.cayman.max_hw_contexts = 4; cayman_gpu_init()
990 rdev->config.cayman.sx_max_export_size = 128; cayman_gpu_init()
991 rdev->config.cayman.sx_max_export_pos_size = 32; cayman_gpu_init()
992 rdev->config.cayman.sx_max_export_smx_size = 96; cayman_gpu_init()
994 rdev->config.cayman.max_texture_channel_caches = 2; cayman_gpu_init()
995 rdev->config.cayman.max_gprs = 256; cayman_gpu_init()
996 rdev->config.cayman.max_threads = 256; cayman_gpu_init()
997 rdev->config.cayman.max_gs_threads = 32; cayman_gpu_init()
998 rdev->config.cayman.max_stack_entries = 512; cayman_gpu_init()
999 rdev->config.cayman.sx_num_of_sets = 8; cayman_gpu_init()
1000 rdev->config.cayman.sq_num_cf_insts = 2; cayman_gpu_init()
1002 rdev->config.cayman.sc_prim_fifo_size = 0x40; cayman_gpu_init()
1003 rdev->config.cayman.sc_hiz_tile_fifo_size = 0x30; cayman_gpu_init()
1004 rdev->config.cayman.sc_earlyz_tile_fifo_size = 0x130; cayman_gpu_init()
1022 evergreen_fix_pci_max_read_req_size(rdev); cayman_gpu_init()
1028 rdev->config.cayman.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024; cayman_gpu_init()
1029 if (rdev->config.cayman.mem_row_size_in_kb > 4) cayman_gpu_init()
1030 rdev->config.cayman.mem_row_size_in_kb = 4; cayman_gpu_init()
1032 rdev->config.cayman.shader_engine_tile_size = 32; cayman_gpu_init()
1033 rdev->config.cayman.num_gpus = 1; cayman_gpu_init()
1034 rdev->config.cayman.multi_gpu_tile_size = 64; cayman_gpu_init()
1037 rdev->config.cayman.num_tile_pipes = (1 << tmp); cayman_gpu_init()
1039 rdev->config.cayman.mem_max_burst_length_bytes = (tmp + 1) * 256; cayman_gpu_init()
1041 rdev->config.cayman.num_shader_engines = tmp + 1; cayman_gpu_init()
1043 rdev->config.cayman.num_gpus = tmp + 1; cayman_gpu_init()
1045 rdev->config.cayman.multi_gpu_tile_size = 1 << tmp; cayman_gpu_init()
1047 rdev->config.cayman.mem_row_size_in_kb = 1 << tmp; cayman_gpu_init()
1057 rdev->config.cayman.tile_config = 0; cayman_gpu_init()
1058 switch (rdev->config.cayman.num_tile_pipes) { cayman_gpu_init()
1061 rdev->config.cayman.tile_config |= (0 << 0); cayman_gpu_init()
1064 rdev->config.cayman.tile_config |= (1 << 0); cayman_gpu_init()
1067 rdev->config.cayman.tile_config |= (2 << 0); cayman_gpu_init()
1070 rdev->config.cayman.tile_config |= (3 << 0); cayman_gpu_init()
1075 if (rdev->flags & RADEON_IS_IGP) cayman_gpu_init()
1076 rdev->config.cayman.tile_config |= 1 << 4; cayman_gpu_init()
1080 rdev->config.cayman.tile_config |= 0 << 4; cayman_gpu_init()
1083 rdev->config.cayman.tile_config |= 1 << 4; cayman_gpu_init()
1087 rdev->config.cayman.tile_config |= 2 << 4; cayman_gpu_init()
1091 rdev->config.cayman.tile_config |= cayman_gpu_init()
1093 rdev->config.cayman.tile_config |= cayman_gpu_init()
1097 for (i = (rdev->config.cayman.max_shader_engines - 1); i >= 0; i--) { cayman_gpu_init()
1109 for (i = 0; i < (rdev->config.cayman.max_backends_per_se * rdev->config.cayman.max_shader_engines); i++) cayman_gpu_init()
1113 for (i = 0; i < (rdev->config.cayman.max_backends_per_se * rdev->config.cayman.max_shader_engines); i++) cayman_gpu_init()
1117 for (i = 0; i < rdev->config.cayman.max_shader_engines; i++) { cayman_gpu_init()
1123 simd_disable_bitmap |= 0xffffffff << rdev->config.cayman.max_simds_per_se; cayman_gpu_init()
1127 rdev->config.cayman.active_simds = hweight32(~tmp); cayman_gpu_init()
1134 if (ASIC_IS_DCE6(rdev)) cayman_gpu_init()
1143 if ((rdev->config.cayman.max_backends_per_se == 1) && cayman_gpu_init()
1144 (rdev->flags & RADEON_IS_IGP)) { cayman_gpu_init()
1154 tmp = r6xx_remap_render_backend(rdev, tmp, cayman_gpu_init()
1155 rdev->config.cayman.max_backends_per_se * cayman_gpu_init()
1156 rdev->config.cayman.max_shader_engines, cayman_gpu_init()
1162 for (i = 0; i < rdev->config.cayman.max_texture_channel_caches; i++) cayman_gpu_init()
1184 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.cayman.sx_num_of_sets); cayman_gpu_init()
1200 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.cayman.sx_max_export_size / 4) - 1) | cayman_gpu_init()
1201 POSITION_BUFFER_SIZE((rdev->config.cayman.sx_max_export_pos_size / 4) - 1) | cayman_gpu_init()
1202 SMX_BUFFER_SIZE((rdev->config.cayman.sx_max_export_smx_size / 4) - 1))); cayman_gpu_init()
1204 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.cayman.sc_prim_fifo_size) | cayman_gpu_init()
1205 SC_HIZ_TILE_FIFO_SIZE(rdev->config.cayman.sc_hiz_tile_fifo_size) | cayman_gpu_init()
1206 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.cayman.sc_earlyz_tile_fifo_size))); cayman_gpu_init()
1213 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.cayman.sq_num_cf_insts) | cayman_gpu_init()
1256 if (rdev->family == CHIP_ARUBA) { cayman_gpu_init()
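The SIMD accounting in cayman_gpu_init() above reads a per-SE disable bitmap, forces the lanes beyond max_simds_per_se to "disabled", and counts the survivors. A stand-alone model, with GCC's __builtin_popcount() standing in for the kernel's hweight32() and a made-up fuse value:

#include <stdio.h>
#include <stdint.h>

int main(void)
{
    uint32_t max_simds_per_se = 6;        /* Cayman-class example */
    uint32_t disable_bitmap = 0x00000021; /* hypothetical fuse value */

    /* pad the unused high lanes so they count as disabled */
    disable_bitmap |= 0xffffffffu << max_simds_per_se;
    /* active units are the set bits of the complement */
    printf("active simds = %d\n", __builtin_popcount(~disable_bitmap));
    return 0;
}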
1269 void cayman_pcie_gart_tlb_flush(struct radeon_device *rdev) cayman_pcie_gart_tlb_flush() argument
1278 static int cayman_pcie_gart_enable(struct radeon_device *rdev) cayman_pcie_gart_enable() argument
1282 if (rdev->gart.robj == NULL) { cayman_pcie_gart_enable()
1283 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n"); cayman_pcie_gart_enable()
1286 r = radeon_gart_table_vram_pin(rdev); cayman_pcie_gart_enable()
1309 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12); cayman_pcie_gart_enable()
1310 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12); cayman_pcie_gart_enable()
1311 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); cayman_pcie_gart_enable()
1313 (u32)(rdev->dummy_page.addr >> 12)); cayman_pcie_gart_enable()
1330 rdev->vm_manager.max_pfn - 1); cayman_pcie_gart_enable()
1332 rdev->vm_manager.saved_table_addr[i]); cayman_pcie_gart_enable()
1337 (u32)(rdev->dummy_page.addr >> 12)); cayman_pcie_gart_enable()
1354 cayman_pcie_gart_tlb_flush(rdev); cayman_pcie_gart_enable()
1356 (unsigned)(rdev->mc.gtt_size >> 20), cayman_pcie_gart_enable()
1357 (unsigned long long)rdev->gart.table_addr); cayman_pcie_gart_enable()
1358 rdev->gart.ready = true; cayman_pcie_gart_enable()
1362 static void cayman_pcie_gart_disable(struct radeon_device *rdev) cayman_pcie_gart_disable() argument
1367 rdev->vm_manager.saved_table_addr[i] = RREG32( cayman_pcie_gart_disable()
1386 radeon_gart_table_vram_unpin(rdev); cayman_pcie_gart_disable()
1389 static void cayman_pcie_gart_fini(struct radeon_device *rdev) cayman_pcie_gart_fini() argument
1391 cayman_pcie_gart_disable(rdev); cayman_pcie_gart_fini()
1392 radeon_gart_table_vram_free(rdev); cayman_pcie_gart_fini()
1393 radeon_gart_fini(rdev); cayman_pcie_gart_fini()
1396 void cayman_cp_int_cntl_setup(struct radeon_device *rdev, cayman_cp_int_cntl_setup() argument
1408 void cayman_fence_ring_emit(struct radeon_device *rdev, cayman_fence_ring_emit() argument
1411 struct radeon_ring *ring = &rdev->ring[fence->ring]; cayman_fence_ring_emit()
1412 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; cayman_fence_ring_emit()
1431 void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) cayman_ring_ib_execute() argument
1433 struct radeon_ring *ring = &rdev->ring[ib->ring]; cayman_ring_ib_execute()
1467 static void cayman_cp_enable(struct radeon_device *rdev, bool enable) cayman_cp_enable() argument
1472 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) cayman_cp_enable()
1473 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size); cayman_cp_enable()
1476 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; cayman_cp_enable()
1480 u32 cayman_gfx_get_rptr(struct radeon_device *rdev, cayman_gfx_get_rptr() argument
1485 if (rdev->wb.enabled) cayman_gfx_get_rptr()
1486 rptr = rdev->wb.wb[ring->rptr_offs/4]; cayman_gfx_get_rptr()
1499 u32 cayman_gfx_get_wptr(struct radeon_device *rdev, cayman_gfx_get_wptr() argument
1514 void cayman_gfx_set_wptr(struct radeon_device *rdev, cayman_gfx_set_wptr() argument
1529 static int cayman_cp_load_microcode(struct radeon_device *rdev) cayman_cp_load_microcode() argument
1534 if (!rdev->me_fw || !rdev->pfp_fw) cayman_cp_load_microcode()
1537 cayman_cp_enable(rdev, false); cayman_cp_load_microcode()
1539 fw_data = (const __be32 *)rdev->pfp_fw->data; cayman_cp_load_microcode()
1545 fw_data = (const __be32 *)rdev->me_fw->data; cayman_cp_load_microcode()
1556 static int cayman_cp_start(struct radeon_device *rdev) cayman_cp_start() argument
1558 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; cayman_cp_start()
1561 r = radeon_ring_lock(rdev, ring, 7); cayman_cp_start()
1569 radeon_ring_write(ring, rdev->config.cayman.max_hw_contexts - 1); cayman_cp_start()
1573 radeon_ring_unlock_commit(rdev, ring, false); cayman_cp_start()
1575 cayman_cp_enable(rdev, true); cayman_cp_start()
1577 r = radeon_ring_lock(rdev, ring, cayman_default_size + 19); cayman_cp_start()
1615 radeon_ring_unlock_commit(rdev, ring, false); cayman_cp_start()
1622 static void cayman_cp_fini(struct radeon_device *rdev) cayman_cp_fini() argument
1624 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; cayman_cp_fini()
1625 cayman_cp_enable(rdev, false); cayman_cp_fini()
1626 radeon_ring_fini(rdev, ring); cayman_cp_fini()
1627 radeon_scratch_free(rdev, ring->rptr_save_reg); cayman_cp_fini()
1630 static int cayman_cp_resume(struct radeon_device *rdev) cayman_cp_resume() argument
1691 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF); cayman_cp_resume()
1699 ring = &rdev->ring[ridx[i]]; cayman_cp_resume()
1708 addr = rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET; cayman_cp_resume()
1715 ring = &rdev->ring[ridx[i]]; cayman_cp_resume()
1721 ring = &rdev->ring[ridx[i]]; cayman_cp_resume()
1733 cayman_cp_start(rdev); cayman_cp_resume()
1734 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true; cayman_cp_resume()
1735 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; cayman_cp_resume()
1736 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; cayman_cp_resume()
1738 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]); cayman_cp_resume()
1740 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; cayman_cp_resume()
1741 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; cayman_cp_resume()
1742 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; cayman_cp_resume()
1746 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) cayman_cp_resume()
1747 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size); cayman_cp_resume()
1752 u32 cayman_gpu_check_soft_reset(struct radeon_device *rdev) cayman_gpu_check_soft_reset() argument
1813 if (evergreen_is_display_hung(rdev)) cayman_gpu_check_soft_reset()
1830 static void cayman_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask) cayman_gpu_soft_reset() argument
1839 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask); cayman_gpu_soft_reset()
1841 evergreen_print_gpu_status_regs(rdev); cayman_gpu_soft_reset()
1842 dev_info(rdev->dev, " VM_CONTEXT0_PROTECTION_FAULT_ADDR 0x%08X\n", cayman_gpu_soft_reset()
1844 dev_info(rdev->dev, " VM_CONTEXT0_PROTECTION_FAULT_STATUS 0x%08X\n", cayman_gpu_soft_reset()
1846 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", cayman_gpu_soft_reset()
1848 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", cayman_gpu_soft_reset()
1870 evergreen_mc_stop(rdev, &save); cayman_gpu_soft_reset()
1871 if (evergreen_mc_wait_for_idle(rdev)) { cayman_gpu_soft_reset()
1872 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); cayman_gpu_soft_reset()
1920 if (!(rdev->flags & RADEON_IS_IGP)) { cayman_gpu_soft_reset()
1928 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp); cayman_gpu_soft_reset()
1942 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); cayman_gpu_soft_reset()
1956 evergreen_mc_resume(rdev, &save); cayman_gpu_soft_reset()
1959 evergreen_print_gpu_status_regs(rdev); cayman_gpu_soft_reset()
1962 int cayman_asic_reset(struct radeon_device *rdev) cayman_asic_reset() argument
1966 reset_mask = cayman_gpu_check_soft_reset(rdev); cayman_asic_reset()
1969 r600_set_bios_scratch_engine_hung(rdev, true); cayman_asic_reset()
1971 cayman_gpu_soft_reset(rdev, reset_mask); cayman_asic_reset()
1973 reset_mask = cayman_gpu_check_soft_reset(rdev); cayman_asic_reset()
1976 evergreen_gpu_pci_config_reset(rdev); cayman_asic_reset()
1978 r600_set_bios_scratch_engine_hung(rdev, false); cayman_asic_reset()
1986 * @rdev: radeon_device pointer
1992 bool cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) cayman_gfx_is_lockup() argument
1994 u32 reset_mask = cayman_gpu_check_soft_reset(rdev); cayman_gfx_is_lockup()
1999 radeon_ring_lockup_update(rdev, ring); cayman_gfx_is_lockup()
2002 return radeon_ring_test_lockup(rdev, ring); cayman_gfx_is_lockup()
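cayman_gfx_is_lockup() above encodes a common pattern: if the soft-reset status reports no engine trouble for this ring, the lockup tracker is refreshed and the ring is declared healthy; only otherwise is the cached tracker consulted. A simplified model with stand-in types:

#include <stdio.h>
#include <stdbool.h>
#include <stdint.h>

struct tracker { uint32_t last_rptr; };

static bool is_lockup(uint32_t reset_mask, uint32_t engine_bits,
                      struct tracker *t, uint32_t rptr)
{
    if (!(reset_mask & engine_bits)) {
        t->last_rptr = rptr;   /* models radeon_ring_lockup_update() */
        return false;
    }
    /* models radeon_ring_test_lockup(): stalled if no forward progress */
    return t->last_rptr == rptr;
}

int main(void)
{
    struct tracker t = { .last_rptr = 0 };
    printf("%d\n", is_lockup(0x0, 0x1, &t, 42)); /* healthy: refreshes */
    printf("%d\n", is_lockup(0x1, 0x1, &t, 42)); /* busy, no progress: 1 */
    return 0;
}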
2005 static int cayman_startup(struct radeon_device *rdev) cayman_startup() argument
2007 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; cayman_startup()
2011 evergreen_pcie_gen2_enable(rdev); cayman_startup()
2013 evergreen_program_aspm(rdev); cayman_startup()
2016 r = r600_vram_scratch_init(rdev); cayman_startup()
2020 evergreen_mc_program(rdev); cayman_startup()
2022 if (!(rdev->flags & RADEON_IS_IGP) && !rdev->pm.dpm_enabled) { cayman_startup()
2023 r = ni_mc_load_microcode(rdev); cayman_startup()
2030 r = cayman_pcie_gart_enable(rdev); cayman_startup()
2033 cayman_gpu_init(rdev); cayman_startup()
2036 if (rdev->flags & RADEON_IS_IGP) { cayman_startup()
2037 rdev->rlc.reg_list = tn_rlc_save_restore_register_list; cayman_startup()
2038 rdev->rlc.reg_list_size = cayman_startup()
2040 rdev->rlc.cs_data = cayman_cs_data; cayman_startup()
2041 r = sumo_rlc_init(rdev); cayman_startup()
2049 r = radeon_wb_init(rdev); cayman_startup()
2053 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); cayman_startup()
2055 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); cayman_startup()
2059 r = uvd_v2_2_resume(rdev); cayman_startup()
2061 r = radeon_fence_driver_start_ring(rdev, cayman_startup()
2064 dev_err(rdev->dev, "UVD fences init error (%d).\n", r); cayman_startup()
2067 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; cayman_startup()
2069 if (rdev->family == CHIP_ARUBA) { cayman_startup()
2070 r = radeon_vce_resume(rdev); cayman_startup()
2072 r = vce_v1_0_resume(rdev); cayman_startup()
2075 r = radeon_fence_driver_start_ring(rdev, cayman_startup()
2078 r = radeon_fence_driver_start_ring(rdev, cayman_startup()
2082 dev_err(rdev->dev, "VCE init error (%d).\n", r); cayman_startup()
2083 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0; cayman_startup()
2084 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0; cayman_startup()
2088 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX); cayman_startup()
2090 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); cayman_startup()
2094 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX); cayman_startup()
2096 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); cayman_startup()
2100 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX); cayman_startup()
2102 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); cayman_startup()
2106 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_DMA1_INDEX); cayman_startup()
2108 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); cayman_startup()
2113 if (!rdev->irq.installed) { cayman_startup()
2114 r = radeon_irq_kms_init(rdev); cayman_startup()
2119 r = r600_irq_init(rdev); cayman_startup()
2122 radeon_irq_kms_fini(rdev); cayman_startup()
2125 evergreen_irq_set(rdev); cayman_startup()
2127 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, cayman_startup()
2132 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; cayman_startup()
2133 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET, cayman_startup()
2138 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; cayman_startup()
2139 r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET, cayman_startup()
2144 r = cayman_cp_load_microcode(rdev); cayman_startup()
2147 r = cayman_cp_resume(rdev); cayman_startup()
2151 r = cayman_dma_resume(rdev); cayman_startup()
2155 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; cayman_startup()
2157 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, cayman_startup()
2160 r = uvd_v1_0_init(rdev); cayman_startup()
2165 if (rdev->family == CHIP_ARUBA) { cayman_startup()
2166 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX]; cayman_startup()
2168 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, 0x0); cayman_startup()
2170 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX]; cayman_startup()
2172 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, 0x0); cayman_startup()
2175 r = vce_v1_0_init(rdev); cayman_startup()
2180 r = radeon_ib_pool_init(rdev); cayman_startup()
2182 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); cayman_startup()
2186 r = radeon_vm_manager_init(rdev); cayman_startup()
2188 dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r); cayman_startup()
2192 r = radeon_audio_init(rdev); cayman_startup()
2199 int cayman_resume(struct radeon_device *rdev) cayman_resume() argument
2208 atom_asic_init(rdev->mode_info.atom_context); cayman_resume()
2211 ni_init_golden_registers(rdev); cayman_resume()
2213 if (rdev->pm.pm_method == PM_METHOD_DPM) cayman_resume()
2214 radeon_pm_resume(rdev); cayman_resume()
2216 rdev->accel_working = true; cayman_resume()
2217 r = cayman_startup(rdev); cayman_resume()
2220 rdev->accel_working = false; cayman_resume()
2226 int cayman_suspend(struct radeon_device *rdev) cayman_suspend() argument
2228 radeon_pm_suspend(rdev); cayman_suspend()
2229 radeon_audio_fini(rdev); cayman_suspend()
2230 radeon_vm_manager_fini(rdev); cayman_suspend()
2231 cayman_cp_enable(rdev, false); cayman_suspend()
2232 cayman_dma_stop(rdev); cayman_suspend()
2233 uvd_v1_0_fini(rdev); cayman_suspend()
2234 radeon_uvd_suspend(rdev); cayman_suspend()
2235 evergreen_irq_suspend(rdev); cayman_suspend()
2236 radeon_wb_disable(rdev); cayman_suspend()
2237 cayman_pcie_gart_disable(rdev); cayman_suspend()
2247 int cayman_init(struct radeon_device *rdev) cayman_init() argument
2249 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; cayman_init()
2253 if (!radeon_get_bios(rdev)) { cayman_init()
2254 if (ASIC_IS_AVIVO(rdev)) cayman_init()
2258 if (!rdev->is_atom_bios) { cayman_init()
2259 dev_err(rdev->dev, "Expecting atombios for cayman GPU\n"); cayman_init()
2262 r = radeon_atombios_init(rdev); cayman_init()
2267 if (!radeon_card_posted(rdev)) { cayman_init()
2268 if (!rdev->bios) { cayman_init()
2269 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n"); cayman_init()
2273 atom_asic_init(rdev->mode_info.atom_context); cayman_init()
2276 ni_init_golden_registers(rdev); cayman_init()
2278 r600_scratch_init(rdev); cayman_init()
2280 radeon_surface_init(rdev); cayman_init()
2282 radeon_get_clock_info(rdev->ddev); cayman_init()
2284 r = radeon_fence_driver_init(rdev); cayman_init()
2288 r = evergreen_mc_init(rdev); cayman_init()
2292 r = radeon_bo_init(rdev); cayman_init()
2296 if (rdev->flags & RADEON_IS_IGP) { cayman_init()
2297 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) { cayman_init()
2298 r = ni_init_microcode(rdev); cayman_init()
2305 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) { cayman_init()
2306 r = ni_init_microcode(rdev); cayman_init()
2315 radeon_pm_init(rdev); cayman_init()
2318 r600_ring_init(rdev, ring, 1024 * 1024); cayman_init()
2320 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; cayman_init()
2322 r600_ring_init(rdev, ring, 64 * 1024); cayman_init()
2324 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; cayman_init()
2326 r600_ring_init(rdev, ring, 64 * 1024); cayman_init()
2328 r = radeon_uvd_init(rdev); cayman_init()
2330 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; cayman_init()
2332 r600_ring_init(rdev, ring, 4096); cayman_init()
2335 if (rdev->family == CHIP_ARUBA) { cayman_init()
2336 r = radeon_vce_init(rdev); cayman_init()
2338 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX]; cayman_init()
2340 r600_ring_init(rdev, ring, 4096); cayman_init()
2342 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX]; cayman_init()
2344 r600_ring_init(rdev, ring, 4096); cayman_init()
2348 rdev->ih.ring_obj = NULL; cayman_init()
2349 r600_ih_ring_init(rdev, 64 * 1024); cayman_init()
2351 r = r600_pcie_gart_init(rdev); cayman_init()
2355 rdev->accel_working = true; cayman_init()
2356 r = cayman_startup(rdev); cayman_init()
2358 dev_err(rdev->dev, "disabling GPU acceleration\n"); cayman_init()
2359 cayman_cp_fini(rdev); cayman_init()
2360 cayman_dma_fini(rdev); cayman_init()
2361 r600_irq_fini(rdev); cayman_init()
2362 if (rdev->flags & RADEON_IS_IGP) cayman_init()
2363 sumo_rlc_fini(rdev); cayman_init()
2364 radeon_wb_fini(rdev); cayman_init()
2365 radeon_ib_pool_fini(rdev); cayman_init()
2366 radeon_vm_manager_fini(rdev); cayman_init()
2367 radeon_irq_kms_fini(rdev); cayman_init()
2368 cayman_pcie_gart_fini(rdev); cayman_init()
2369 rdev->accel_working = false; cayman_init()
2379 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) { cayman_init()
2387 void cayman_fini(struct radeon_device *rdev) cayman_fini() argument
2389 radeon_pm_fini(rdev); cayman_fini()
2390 cayman_cp_fini(rdev); cayman_fini()
2391 cayman_dma_fini(rdev); cayman_fini()
2392 r600_irq_fini(rdev); cayman_fini()
2393 if (rdev->flags & RADEON_IS_IGP) cayman_fini()
2394 sumo_rlc_fini(rdev); cayman_fini()
2395 radeon_wb_fini(rdev); cayman_fini()
2396 radeon_vm_manager_fini(rdev); cayman_fini()
2397 radeon_ib_pool_fini(rdev); cayman_fini()
2398 radeon_irq_kms_fini(rdev); cayman_fini()
2399 uvd_v1_0_fini(rdev); cayman_fini()
2400 radeon_uvd_fini(rdev); cayman_fini()
2401 if (rdev->family == CHIP_ARUBA) cayman_fini()
2402 radeon_vce_fini(rdev); cayman_fini()
2403 cayman_pcie_gart_fini(rdev); cayman_fini()
2404 r600_vram_scratch_fini(rdev); cayman_fini()
2405 radeon_gem_fini(rdev); cayman_fini()
2406 radeon_fence_driver_fini(rdev); cayman_fini()
2407 radeon_bo_fini(rdev); cayman_fini()
2408 radeon_atombios_fini(rdev); cayman_fini()
2409 kfree(rdev->bios); cayman_fini()
2410 rdev->bios = NULL; cayman_fini()
2416 int cayman_vm_init(struct radeon_device *rdev) cayman_vm_init() argument
2419 rdev->vm_manager.nvm = 8; cayman_vm_init()
2421 if (rdev->flags & RADEON_IS_IGP) { cayman_vm_init()
2424 rdev->vm_manager.vram_base_offset = tmp; cayman_vm_init()
2426 rdev->vm_manager.vram_base_offset = 0; cayman_vm_init()
2430 void cayman_vm_fini(struct radeon_device *rdev) cayman_vm_fini() argument
2437 * @rdev: radeon_device pointer
2443 void cayman_vm_decode_fault(struct radeon_device *rdev, cayman_vm_decode_fault() argument
2598 * @rdev: radeon_device pointer
2603 void cayman_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring, cayman_vm_flush() argument
2632 int tn_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk) tn_set_vce_clocks() argument
2637 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, tn_set_vce_clocks()
H A Dr600.c100 int r600_debugfs_mc_info_init(struct radeon_device *rdev);
103 int r600_mc_wait_for_idle(struct radeon_device *rdev);
104 static void r600_gpu_init(struct radeon_device *rdev);
105 void r600_fini(struct radeon_device *rdev);
106 void r600_irq_disable(struct radeon_device *rdev);
107 static void r600_pcie_gen2_enable(struct radeon_device *rdev);
108 extern int evergreen_rlc_resume(struct radeon_device *rdev);
109 extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
114 u32 r600_rcu_rreg(struct radeon_device *rdev, u32 reg) r600_rcu_rreg() argument
119 spin_lock_irqsave(&rdev->rcu_idx_lock, flags); r600_rcu_rreg()
122 spin_unlock_irqrestore(&rdev->rcu_idx_lock, flags); r600_rcu_rreg()
126 void r600_rcu_wreg(struct radeon_device *rdev, u32 reg, u32 v) r600_rcu_wreg() argument
130 spin_lock_irqsave(&rdev->rcu_idx_lock, flags); r600_rcu_wreg()
133 spin_unlock_irqrestore(&rdev->rcu_idx_lock, flags); r600_rcu_wreg()
136 u32 r600_uvd_ctx_rreg(struct radeon_device *rdev, u32 reg) r600_uvd_ctx_rreg() argument
141 spin_lock_irqsave(&rdev->uvd_idx_lock, flags); r600_uvd_ctx_rreg()
144 spin_unlock_irqrestore(&rdev->uvd_idx_lock, flags); r600_uvd_ctx_rreg()
148 void r600_uvd_ctx_wreg(struct radeon_device *rdev, u32 reg, u32 v) r600_uvd_ctx_wreg() argument
152 spin_lock_irqsave(&rdev->uvd_idx_lock, flags); r600_uvd_ctx_wreg()
155 spin_unlock_irqrestore(&rdev->uvd_idx_lock, flags); r600_uvd_ctx_wreg()
161 * @rdev: radeon_device pointer
168 int r600_get_allowed_info_register(struct radeon_device *rdev, r600_get_allowed_info_register() argument
187 * @rdev: radeon_device pointer
192 u32 r600_get_xclk(struct radeon_device *rdev) r600_get_xclk() argument
194 return rdev->clock.spll.reference_freq; r600_get_xclk()
197 int r600_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk) r600_set_uvd_clocks() argument
211 if (rdev->family >= CHIP_RS780) r600_set_uvd_clocks()
221 if (rdev->clock.spll.reference_freq == 10000) r600_set_uvd_clocks()
226 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 50000, 160000, r600_set_uvd_clocks()
232 if (rdev->family >= CHIP_RV670 && rdev->family < CHIP_RS780) r600_set_uvd_clocks()
237 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); r600_set_uvd_clocks()
245 if (rdev->family >= CHIP_RS780) r600_set_uvd_clocks()
273 if (rdev->family >= CHIP_RS780) r600_set_uvd_clocks()
276 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); r600_set_uvd_clocks()
293 struct radeon_device *rdev = dev->dev_private; dce3_program_fmt() local
344 int rv6xx_get_temp(struct radeon_device *rdev) rv6xx_get_temp() argument
356 void r600_pm_get_dynpm_state(struct radeon_device *rdev) r600_pm_get_dynpm_state() argument
360 rdev->pm.dynpm_can_upclock = true; r600_pm_get_dynpm_state()
361 rdev->pm.dynpm_can_downclock = true; r600_pm_get_dynpm_state()
364 if ((rdev->flags & RADEON_IS_IGP) || (rdev->family == CHIP_R600)) { r600_pm_get_dynpm_state()
367 if (rdev->pm.num_power_states > 2) r600_pm_get_dynpm_state()
370 switch (rdev->pm.dynpm_planned_action) { r600_pm_get_dynpm_state()
372 rdev->pm.requested_power_state_index = min_power_state_index; r600_pm_get_dynpm_state()
373 rdev->pm.requested_clock_mode_index = 0; r600_pm_get_dynpm_state()
374 rdev->pm.dynpm_can_downclock = false; r600_pm_get_dynpm_state()
377 if (rdev->pm.current_power_state_index == min_power_state_index) { r600_pm_get_dynpm_state()
378 rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index; r600_pm_get_dynpm_state()
379 rdev->pm.dynpm_can_downclock = false; r600_pm_get_dynpm_state()
381 if (rdev->pm.active_crtc_count > 1) { r600_pm_get_dynpm_state()
382 for (i = 0; i < rdev->pm.num_power_states; i++) { r600_pm_get_dynpm_state()
383 if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY) r600_pm_get_dynpm_state()
385 else if (i >= rdev->pm.current_power_state_index) { r600_pm_get_dynpm_state()
386 rdev->pm.requested_power_state_index = r600_pm_get_dynpm_state()
387 rdev->pm.current_power_state_index; r600_pm_get_dynpm_state()
390 rdev->pm.requested_power_state_index = i; r600_pm_get_dynpm_state()
395 if (rdev->pm.current_power_state_index == 0) r600_pm_get_dynpm_state()
396 rdev->pm.requested_power_state_index = r600_pm_get_dynpm_state()
397 rdev->pm.num_power_states - 1; r600_pm_get_dynpm_state()
399 rdev->pm.requested_power_state_index = r600_pm_get_dynpm_state()
400 rdev->pm.current_power_state_index - 1; r600_pm_get_dynpm_state()
403 rdev->pm.requested_clock_mode_index = 0; r600_pm_get_dynpm_state()
405 if ((rdev->pm.active_crtc_count > 0) && r600_pm_get_dynpm_state()
406 (rdev->pm.power_state[rdev->pm.requested_power_state_index]. r600_pm_get_dynpm_state()
407 clock_info[rdev->pm.requested_clock_mode_index].flags & r600_pm_get_dynpm_state()
409 rdev->pm.requested_power_state_index++; r600_pm_get_dynpm_state()
413 if (rdev->pm.current_power_state_index == (rdev->pm.num_power_states - 1)) { r600_pm_get_dynpm_state()
414 rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index; r600_pm_get_dynpm_state()
415 rdev->pm.dynpm_can_upclock = false; r600_pm_get_dynpm_state()
417 if (rdev->pm.active_crtc_count > 1) { r600_pm_get_dynpm_state()
418 for (i = (rdev->pm.num_power_states - 1); i >= 0; i--) { r600_pm_get_dynpm_state()
419 if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY) r600_pm_get_dynpm_state()
421 else if (i <= rdev->pm.current_power_state_index) { r600_pm_get_dynpm_state()
422 rdev->pm.requested_power_state_index = r600_pm_get_dynpm_state()
423 rdev->pm.current_power_state_index; r600_pm_get_dynpm_state()
426 rdev->pm.requested_power_state_index = i; r600_pm_get_dynpm_state()
431 rdev->pm.requested_power_state_index = r600_pm_get_dynpm_state()
432 rdev->pm.current_power_state_index + 1; r600_pm_get_dynpm_state()
434 rdev->pm.requested_clock_mode_index = 0; r600_pm_get_dynpm_state()
437 rdev->pm.requested_power_state_index = rdev->pm.default_power_state_index; r600_pm_get_dynpm_state()
438 rdev->pm.requested_clock_mode_index = 0; r600_pm_get_dynpm_state()
439 rdev->pm.dynpm_can_upclock = false; r600_pm_get_dynpm_state()
450 if (rdev->pm.active_crtc_count > 1) { r600_pm_get_dynpm_state()
451 rdev->pm.requested_power_state_index = -1; r600_pm_get_dynpm_state()
453 for (i = 1; i < rdev->pm.num_power_states; i++) { r600_pm_get_dynpm_state()
454 if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY) r600_pm_get_dynpm_state()
456 else if ((rdev->pm.power_state[i].type == POWER_STATE_TYPE_PERFORMANCE) || r600_pm_get_dynpm_state()
457 (rdev->pm.power_state[i].type == POWER_STATE_TYPE_BATTERY)) { r600_pm_get_dynpm_state()
458 rdev->pm.requested_power_state_index = i; r600_pm_get_dynpm_state()
463 if (rdev->pm.requested_power_state_index == -1) r600_pm_get_dynpm_state()
464 rdev->pm.requested_power_state_index = 0; r600_pm_get_dynpm_state()
466 rdev->pm.requested_power_state_index = 1; r600_pm_get_dynpm_state()
468 switch (rdev->pm.dynpm_planned_action) { r600_pm_get_dynpm_state()
470 rdev->pm.requested_clock_mode_index = 0; r600_pm_get_dynpm_state()
471 rdev->pm.dynpm_can_downclock = false; r600_pm_get_dynpm_state()
474 if (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index) { r600_pm_get_dynpm_state()
475 if (rdev->pm.current_clock_mode_index == 0) { r600_pm_get_dynpm_state()
476 rdev->pm.requested_clock_mode_index = 0; r600_pm_get_dynpm_state()
477 rdev->pm.dynpm_can_downclock = false; r600_pm_get_dynpm_state()
479 rdev->pm.requested_clock_mode_index = r600_pm_get_dynpm_state()
480 rdev->pm.current_clock_mode_index - 1; r600_pm_get_dynpm_state()
482 rdev->pm.requested_clock_mode_index = 0; r600_pm_get_dynpm_state()
483 rdev->pm.dynpm_can_downclock = false; r600_pm_get_dynpm_state()
486 if ((rdev->pm.active_crtc_count > 0) && r600_pm_get_dynpm_state()
487 (rdev->pm.power_state[rdev->pm.requested_power_state_index]. r600_pm_get_dynpm_state()
488 clock_info[rdev->pm.requested_clock_mode_index].flags & r600_pm_get_dynpm_state()
490 rdev->pm.requested_clock_mode_index++; r600_pm_get_dynpm_state()
494 if (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index) { r600_pm_get_dynpm_state()
495 if (rdev->pm.current_clock_mode_index == r600_pm_get_dynpm_state()
496 (rdev->pm.power_state[rdev->pm.requested_power_state_index].num_clock_modes - 1)) { r600_pm_get_dynpm_state()
497 rdev->pm.requested_clock_mode_index = rdev->pm.current_clock_mode_index; r600_pm_get_dynpm_state()
498 rdev->pm.dynpm_can_upclock = false; r600_pm_get_dynpm_state()
500 rdev->pm.requested_clock_mode_index = r600_pm_get_dynpm_state()
501 rdev->pm.current_clock_mode_index + 1; r600_pm_get_dynpm_state()
503 rdev->pm.requested_clock_mode_index = r600_pm_get_dynpm_state()
504 rdev->pm.power_state[rdev->pm.requested_power_state_index].num_clock_modes - 1; r600_pm_get_dynpm_state()
505 rdev->pm.dynpm_can_upclock = false; r600_pm_get_dynpm_state()
509 rdev->pm.requested_power_state_index = rdev->pm.default_power_state_index; r600_pm_get_dynpm_state()
510 rdev->pm.requested_clock_mode_index = 0; r600_pm_get_dynpm_state()
511 rdev->pm.dynpm_can_upclock = false; r600_pm_get_dynpm_state()
521 rdev->pm.power_state[rdev->pm.requested_power_state_index]. r600_pm_get_dynpm_state()
522 clock_info[rdev->pm.requested_clock_mode_index].sclk, r600_pm_get_dynpm_state()
523 rdev->pm.power_state[rdev->pm.requested_power_state_index]. r600_pm_get_dynpm_state()
524 clock_info[rdev->pm.requested_clock_mode_index].mclk, r600_pm_get_dynpm_state()
525 rdev->pm.power_state[rdev->pm.requested_power_state_index]. r600_pm_get_dynpm_state()
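Stripped of the single-display and IGP special cases, the upclock/downclock branches of r600_pm_get_dynpm_state() reduce to clamping an index between a minimum state and num_power_states - 1 while recording whether further movement in that direction remains possible. A stand-alone model of that core (names and the step interface are hypothetical):

#include <stdbool.h>
#include <stdio.h>

struct dynpm {
    int current_idx;
    int num_states;
    bool can_upclock;
    bool can_downclock;
};

/* step < 0 requests a downclock, step > 0 an upclock; the index is
 * clamped and the can_* flags mirror what the real code tracks. */
static int dynpm_step(struct dynpm *pm, int step)
{
    int req = pm->current_idx + step;

    pm->can_upclock = true;
    pm->can_downclock = true;

    if (req <= 0) {
        req = 0;
        pm->can_downclock = false;
    }
    if (req >= pm->num_states - 1) {
        req = pm->num_states - 1;
        pm->can_upclock = false;
    }
    return req;
}

int main(void)
{
    struct dynpm pm = { .current_idx = 0, .num_states = 3 };

    printf("up   -> %d\n", dynpm_step(&pm, +1));  /* 1 */
    printf("down -> %d\n", dynpm_step(&pm, -1));  /* 0, can_downclock false */
    return 0;
}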
529 void rs780_pm_init_profile(struct radeon_device *rdev) rs780_pm_init_profile() argument
531 if (rdev->pm.num_power_states == 2) { rs780_pm_init_profile()
533 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; rs780_pm_init_profile()
534 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; rs780_pm_init_profile()
535 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
536 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
538 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 0; rs780_pm_init_profile()
539 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0; rs780_pm_init_profile()
540 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
541 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
543 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 0; rs780_pm_init_profile()
544 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 0; rs780_pm_init_profile()
545 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
546 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
548 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 0; rs780_pm_init_profile()
549 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1; rs780_pm_init_profile()
550 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
551 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
553 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 0; rs780_pm_init_profile()
554 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0; rs780_pm_init_profile()
555 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
556 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
558 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 0; rs780_pm_init_profile()
559 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 0; rs780_pm_init_profile()
560 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
561 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
563 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 0; rs780_pm_init_profile()
564 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 1; rs780_pm_init_profile()
565 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
566 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
567 } else if (rdev->pm.num_power_states == 3) { rs780_pm_init_profile()
569 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; rs780_pm_init_profile()
570 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; rs780_pm_init_profile()
571 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
572 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
574 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 1; rs780_pm_init_profile()
575 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1; rs780_pm_init_profile()
576 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
577 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
579 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 1; rs780_pm_init_profile()
580 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1; rs780_pm_init_profile()
581 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
582 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
584 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1; rs780_pm_init_profile()
585 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 2; rs780_pm_init_profile()
586 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
587 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
589 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 1; rs780_pm_init_profile()
590 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 1; rs780_pm_init_profile()
591 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
592 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
594 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 1; rs780_pm_init_profile()
595 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 1; rs780_pm_init_profile()
596 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
597 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
599 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 1; rs780_pm_init_profile()
600 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2; rs780_pm_init_profile()
601 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
602 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
605 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; rs780_pm_init_profile()
606 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; rs780_pm_init_profile()
607 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
608 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
610 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 2; rs780_pm_init_profile()
611 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 2; rs780_pm_init_profile()
612 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
613 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
615 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 2; rs780_pm_init_profile()
616 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 2; rs780_pm_init_profile()
617 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
618 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
620 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 2; rs780_pm_init_profile()
621 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 3; rs780_pm_init_profile()
622 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
623 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
625 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 2; rs780_pm_init_profile()
626 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0; rs780_pm_init_profile()
627 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
628 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
630 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 2; rs780_pm_init_profile()
631 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 0; rs780_pm_init_profile()
632 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
633 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
635 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2; rs780_pm_init_profile()
636 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 3; rs780_pm_init_profile()
637 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0; rs780_pm_init_profile()
638 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0; rs780_pm_init_profile()
642 void r600_pm_init_profile(struct radeon_device *rdev) r600_pm_init_profile() argument
646 if (rdev->family == CHIP_R600) { r600_pm_init_profile()
649 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
650 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
651 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
652 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0; r600_pm_init_profile()
654 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
655 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
656 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
657 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; r600_pm_init_profile()
659 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
660 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
661 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
662 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0; r600_pm_init_profile()
664 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
665 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
666 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
667 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0; r600_pm_init_profile()
669 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
670 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
671 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
672 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; r600_pm_init_profile()
674 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
675 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
676 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
677 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0; r600_pm_init_profile()
679 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
680 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
681 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
682 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0; r600_pm_init_profile()
684 if (rdev->pm.num_power_states < 4) { r600_pm_init_profile()
686 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
687 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
688 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
689 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2; r600_pm_init_profile()
691 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 1; r600_pm_init_profile()
692 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1; r600_pm_init_profile()
693 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
694 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; r600_pm_init_profile()
696 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 1; r600_pm_init_profile()
697 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1; r600_pm_init_profile()
698 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
699 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1; r600_pm_init_profile()
701 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1; r600_pm_init_profile()
702 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1; r600_pm_init_profile()
703 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
704 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2; r600_pm_init_profile()
706 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 2; r600_pm_init_profile()
707 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 2; r600_pm_init_profile()
708 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
709 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; r600_pm_init_profile()
711 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 2; r600_pm_init_profile()
712 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 2; r600_pm_init_profile()
713 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
714 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1; r600_pm_init_profile()
716 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2; r600_pm_init_profile()
717 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2; r600_pm_init_profile()
718 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
719 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2; r600_pm_init_profile()
722 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
723 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r600_pm_init_profile()
724 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
725 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2; r600_pm_init_profile()
727 if (rdev->flags & RADEON_IS_MOBILITY) r600_pm_init_profile()
728 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0); r600_pm_init_profile()
730 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0); r600_pm_init_profile()
731 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx; r600_pm_init_profile()
732 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx; r600_pm_init_profile()
733 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
734 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; r600_pm_init_profile()
736 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx; r600_pm_init_profile()
737 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx; r600_pm_init_profile()
738 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
739 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1; r600_pm_init_profile()
741 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0); r600_pm_init_profile()
742 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx; r600_pm_init_profile()
743 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx; r600_pm_init_profile()
744 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
745 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2; r600_pm_init_profile()
747 if (rdev->flags & RADEON_IS_MOBILITY) r600_pm_init_profile()
748 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 1); r600_pm_init_profile()
750 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1); r600_pm_init_profile()
751 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx; r600_pm_init_profile()
752 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx; r600_pm_init_profile()
753 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
754 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; r600_pm_init_profile()
756 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx; r600_pm_init_profile()
757 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx; r600_pm_init_profile()
758 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
759 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1; r600_pm_init_profile()
761 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1); r600_pm_init_profile()
762 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx; r600_pm_init_profile()
763 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx; r600_pm_init_profile()
764 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0; r600_pm_init_profile()
765 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2; r600_pm_init_profile()
770 void r600_pm_misc(struct radeon_device *rdev) r600_pm_misc() argument
772 int req_ps_idx = rdev->pm.requested_power_state_index; r600_pm_misc()
773 int req_cm_idx = rdev->pm.requested_clock_mode_index; r600_pm_misc()
774 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx]; r600_pm_misc()
781 if (voltage->voltage != rdev->pm.current_vddc) { r600_pm_misc()
782 radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC); r600_pm_misc()
783 rdev->pm.current_vddc = voltage->voltage; r600_pm_misc()
789 bool r600_gui_idle(struct radeon_device *rdev) r600_gui_idle() argument
798 bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd) r600_hpd_sense() argument
802 if (ASIC_IS_DCE3(rdev)) { r600_hpd_sense()
853 void r600_hpd_set_polarity(struct radeon_device *rdev, r600_hpd_set_polarity() argument
857 bool connected = r600_hpd_sense(rdev, hpd); r600_hpd_set_polarity()
859 if (ASIC_IS_DCE3(rdev)) { r600_hpd_set_polarity()
945 void r600_hpd_init(struct radeon_device *rdev) r600_hpd_init() argument
947 struct drm_device *dev = rdev->ddev; r600_hpd_init()
962 if (ASIC_IS_DCE3(rdev)) { r600_hpd_init()
964 if (ASIC_IS_DCE32(rdev)) r600_hpd_init()
1006 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd); r600_hpd_init()
1008 radeon_irq_kms_enable_hpd(rdev, enable); r600_hpd_init()
1011 void r600_hpd_fini(struct radeon_device *rdev) r600_hpd_fini() argument
1013 struct drm_device *dev = rdev->ddev; r600_hpd_fini()
1019 if (ASIC_IS_DCE3(rdev)) { r600_hpd_fini()
1060 radeon_irq_kms_disable_hpd(rdev, disable); r600_hpd_fini()
1066 void r600_pcie_gart_tlb_flush(struct radeon_device *rdev) r600_pcie_gart_tlb_flush() argument
1072 if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_RV740) && r600_pcie_gart_tlb_flush()
1073 !(rdev->flags & RADEON_IS_AGP)) { r600_pcie_gart_tlb_flush()
1074 void __iomem *ptr = (void *)rdev->gart.ptr; r600_pcie_gart_tlb_flush()
1087 WREG32(VM_CONTEXT0_INVALIDATION_LOW_ADDR, rdev->mc.gtt_start >> 12); r600_pcie_gart_tlb_flush()
1088 WREG32(VM_CONTEXT0_INVALIDATION_HIGH_ADDR, (rdev->mc.gtt_end - 1) >> 12); r600_pcie_gart_tlb_flush()
1090 for (i = 0; i < rdev->usec_timeout; i++) { r600_pcie_gart_tlb_flush()
1105 int r600_pcie_gart_init(struct radeon_device *rdev) r600_pcie_gart_init() argument
1109 if (rdev->gart.robj) { r600_pcie_gart_init()
1114 r = radeon_gart_init(rdev); r600_pcie_gart_init()
1117 rdev->gart.table_size = rdev->gart.num_gpu_pages * 8; r600_pcie_gart_init()
1118 return radeon_gart_table_vram_alloc(rdev); r600_pcie_gart_init()
1121 static int r600_pcie_gart_enable(struct radeon_device *rdev) r600_pcie_gart_enable() argument
1126 if (rdev->gart.robj == NULL) { r600_pcie_gart_enable()
1127 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n"); r600_pcie_gart_enable()
1130 r = radeon_gart_table_vram_pin(rdev); r600_pcie_gart_enable()
1161 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12); r600_pcie_gart_enable()
1162 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12); r600_pcie_gart_enable()
1163 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); r600_pcie_gart_enable()
1167 (u32)(rdev->dummy_page.addr >> 12)); r600_pcie_gart_enable()
1171 r600_pcie_gart_tlb_flush(rdev); r600_pcie_gart_enable()
1173 (unsigned)(rdev->mc.gtt_size >> 20), r600_pcie_gart_enable()
1174 (unsigned long long)rdev->gart.table_addr); r600_pcie_gart_enable()
1175 rdev->gart.ready = true; r600_pcie_gart_enable()
1179 static void r600_pcie_gart_disable(struct radeon_device *rdev) r600_pcie_gart_disable() argument
1211 radeon_gart_table_vram_unpin(rdev); r600_pcie_gart_disable()
1214 static void r600_pcie_gart_fini(struct radeon_device *rdev) r600_pcie_gart_fini() argument
1216 radeon_gart_fini(rdev); r600_pcie_gart_fini()
1217 r600_pcie_gart_disable(rdev); r600_pcie_gart_fini()
1218 radeon_gart_table_vram_free(rdev); r600_pcie_gart_fini()
1221 static void r600_agp_enable(struct radeon_device *rdev) r600_agp_enable() argument
1255 int r600_mc_wait_for_idle(struct radeon_device *rdev) r600_mc_wait_for_idle() argument
1260 for (i = 0; i < rdev->usec_timeout; i++) { r600_mc_wait_for_idle()
1270 uint32_t rs780_mc_rreg(struct radeon_device *rdev, uint32_t reg) rs780_mc_rreg() argument
1275 spin_lock_irqsave(&rdev->mc_idx_lock, flags); rs780_mc_rreg()
1279 spin_unlock_irqrestore(&rdev->mc_idx_lock, flags); rs780_mc_rreg()
1283 void rs780_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) rs780_mc_wreg() argument
1287 spin_lock_irqsave(&rdev->mc_idx_lock, flags); rs780_mc_wreg()
1292 spin_unlock_irqrestore(&rdev->mc_idx_lock, flags); rs780_mc_wreg()
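rs780_mc_rreg()/rs780_mc_wreg() use the classic index/data pair: the register number goes into an index register, then a data register is accessed, all under mc_idx_lock so concurrent callers cannot interleave the two steps. A userspace model with a mutex standing in for the spinlock (the register file, offsets, and helpers are invented for the sketch):

#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

#define MC_INDEX 0   /* invented offsets for the model */
#define MC_DATA  1

static uint32_t regs[2];
static uint32_t mc_space[256];          /* the indirectly addressed space */
static pthread_mutex_t mc_idx_lock = PTHREAD_MUTEX_INITIALIZER;

static void wreg(int off, uint32_t v)
{
    regs[off] = v;
    if (off == MC_DATA)
        mc_space[regs[MC_INDEX] & 0xff] = v;
}

static uint32_t rreg(int off)
{
    if (off == MC_DATA)
        return mc_space[regs[MC_INDEX] & 0xff];
    return regs[off];
}

static uint32_t mc_rreg(uint32_t reg)
{
    uint32_t r;

    pthread_mutex_lock(&mc_idx_lock);   /* spin_lock_irqsave() in the driver */
    wreg(MC_INDEX, reg);
    r = rreg(MC_DATA);
    pthread_mutex_unlock(&mc_idx_lock);
    return r;
}

static void mc_wreg(uint32_t reg, uint32_t v)
{
    pthread_mutex_lock(&mc_idx_lock);
    wreg(MC_INDEX, reg);
    wreg(MC_DATA, v);
    pthread_mutex_unlock(&mc_idx_lock);
}

int main(void)
{
    mc_wreg(0x42, 0xdeadbeef);
    printf("0x%08x\n", mc_rreg(0x42));
    return 0;
}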
1295 static void r600_mc_program(struct radeon_device *rdev) r600_mc_program() argument
1311 rv515_mc_stop(rdev, &save); r600_mc_program()
1312 if (r600_mc_wait_for_idle(rdev)) { r600_mc_program()
1313 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); r600_mc_program()
1318 if (rdev->flags & RADEON_IS_AGP) { r600_mc_program()
1319 if (rdev->mc.vram_start < rdev->mc.gtt_start) { r600_mc_program()
1322 rdev->mc.vram_start >> 12); r600_mc_program()
1324 rdev->mc.gtt_end >> 12); r600_mc_program()
1328 rdev->mc.gtt_start >> 12); r600_mc_program()
1330 rdev->mc.vram_end >> 12); r600_mc_program()
1333 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR, rdev->mc.vram_start >> 12); r600_mc_program()
1334 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR, rdev->mc.vram_end >> 12); r600_mc_program()
1336 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12); r600_mc_program()
1337 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16; r600_mc_program()
1338 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF); r600_mc_program()
1340 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8)); r600_mc_program()
1343 if (rdev->flags & RADEON_IS_AGP) { r600_mc_program()
1344 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 22); r600_mc_program()
1345 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 22); r600_mc_program()
1346 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22); r600_mc_program()
1352 if (r600_mc_wait_for_idle(rdev)) { r600_mc_program()
1353 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); r600_mc_program()
1355 rv515_mc_resume(rdev, &save); r600_mc_program()
1358 rv515_vga_render_disable(rdev); r600_mc_program()
1363 * @rdev: radeon device structure holding all necessary information
1382 static void r600_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc) r600_vram_gtt_location() argument
1388 dev_warn(rdev->dev, "limiting VRAM\n"); r600_vram_gtt_location()
1392 if (rdev->flags & RADEON_IS_AGP) { r600_vram_gtt_location()
1397 dev_warn(rdev->dev, "limiting VRAM\n"); r600_vram_gtt_location()
1404 dev_warn(rdev->dev, "limiting VRAM\n"); r600_vram_gtt_location()
1411 dev_info(rdev->dev, "VRAM: %lluM 0x%08llX - 0x%08llX (%lluM used)\n", r600_vram_gtt_location()
1416 if (rdev->flags & RADEON_IS_IGP) { r600_vram_gtt_location()
1420 radeon_vram_location(rdev, &rdev->mc, base); r600_vram_gtt_location()
1421 rdev->mc.gtt_base_align = 0; r600_vram_gtt_location()
1422 radeon_gtt_location(rdev, mc); r600_vram_gtt_location()
1426 static int r600_mc_init(struct radeon_device *rdev) r600_mc_init() argument
1434 rdev->mc.vram_is_ddr = true; r600_mc_init()
1459 rdev->mc.vram_width = numchan * chansize; r600_mc_init()
1461 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0); r600_mc_init()
1462 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0); r600_mc_init()
1464 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE); r600_mc_init()
1465 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE); r600_mc_init()
1466 rdev->mc.visible_vram_size = rdev->mc.aper_size; r600_mc_init()
1467 r600_vram_gtt_location(rdev, &rdev->mc); r600_mc_init()
1469 if (rdev->flags & RADEON_IS_IGP) { r600_mc_init()
1470 rs690_pm_info(rdev); r600_mc_init()
1471 rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev); r600_mc_init()
1473 if (rdev->family == CHIP_RS780 || rdev->family == CHIP_RS880) { r600_mc_init()
1475 rdev->fastfb_working = false; r600_mc_init()
1480 if (k8_addr + rdev->mc.visible_vram_size < 0x100000000ULL) r600_mc_init()
1486 if (rdev->mc.igp_sideport_enabled == false && radeon_fastfb == 1) { r600_mc_init()
1488 (unsigned long long)rdev->mc.aper_base, k8_addr); r600_mc_init()
1489 rdev->mc.aper_base = (resource_size_t)k8_addr; r600_mc_init()
1490 rdev->fastfb_working = true; r600_mc_init()
1496 radeon_update_bandwidth_info(rdev); r600_mc_init()
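r600_mc_init() derives the memory bus width as numchan * chansize, with the channel count decoded from a RAMCFG bitfield. A compact model of that decode (the field layout and shift here are invented for the example, not the real RAMCFG encoding):

#include <stdio.h>

#define NOOFCHAN_SHIFT 12          /* invented layout for the example */
#define NOOFCHAN_MASK  (0x3 << NOOFCHAN_SHIFT)

int main(void)
{
    unsigned ramcfg = 2 << NOOFCHAN_SHIFT;   /* pretend register readback */
    unsigned chansize = 32;                  /* bits per channel */
    unsigned numchan;

    switch ((ramcfg & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
    case 0: numchan = 1; break;
    case 1: numchan = 2; break;
    case 2: numchan = 4; break;
    default: numchan = 8; break;
    }
    printf("vram_width = %u bits\n", numchan * chansize);  /* 128 */
    return 0;
}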
1500 int r600_vram_scratch_init(struct radeon_device *rdev) r600_vram_scratch_init() argument
1504 if (rdev->vram_scratch.robj == NULL) { r600_vram_scratch_init()
1505 r = radeon_bo_create(rdev, RADEON_GPU_PAGE_SIZE, r600_vram_scratch_init()
1507 0, NULL, NULL, &rdev->vram_scratch.robj); r600_vram_scratch_init()
1513 r = radeon_bo_reserve(rdev->vram_scratch.robj, false); r600_vram_scratch_init()
1516 r = radeon_bo_pin(rdev->vram_scratch.robj, r600_vram_scratch_init()
1517 RADEON_GEM_DOMAIN_VRAM, &rdev->vram_scratch.gpu_addr); r600_vram_scratch_init()
1519 radeon_bo_unreserve(rdev->vram_scratch.robj); r600_vram_scratch_init()
1522 r = radeon_bo_kmap(rdev->vram_scratch.robj, r600_vram_scratch_init()
1523 (void **)&rdev->vram_scratch.ptr); r600_vram_scratch_init()
1525 radeon_bo_unpin(rdev->vram_scratch.robj); r600_vram_scratch_init()
1526 radeon_bo_unreserve(rdev->vram_scratch.robj); r600_vram_scratch_init()
1531 void r600_vram_scratch_fini(struct radeon_device *rdev) r600_vram_scratch_fini() argument
1535 if (rdev->vram_scratch.robj == NULL) { r600_vram_scratch_fini()
1538 r = radeon_bo_reserve(rdev->vram_scratch.robj, false); r600_vram_scratch_fini()
1540 radeon_bo_kunmap(rdev->vram_scratch.robj); r600_vram_scratch_fini()
1541 radeon_bo_unpin(rdev->vram_scratch.robj); r600_vram_scratch_fini()
1542 radeon_bo_unreserve(rdev->vram_scratch.robj); r600_vram_scratch_fini()
1544 radeon_bo_unref(&rdev->vram_scratch.robj); r600_vram_scratch_fini()
1547 void r600_set_bios_scratch_engine_hung(struct radeon_device *rdev, bool hung) r600_set_bios_scratch_engine_hung() argument
1559 static void r600_print_gpu_status_regs(struct radeon_device *rdev) r600_print_gpu_status_regs() argument
1561 dev_info(rdev->dev, " R_008010_GRBM_STATUS = 0x%08X\n", r600_print_gpu_status_regs()
1563 dev_info(rdev->dev, " R_008014_GRBM_STATUS2 = 0x%08X\n", r600_print_gpu_status_regs()
1565 dev_info(rdev->dev, " R_000E50_SRBM_STATUS = 0x%08X\n", r600_print_gpu_status_regs()
1567 dev_info(rdev->dev, " R_008674_CP_STALLED_STAT1 = 0x%08X\n", r600_print_gpu_status_regs()
1569 dev_info(rdev->dev, " R_008678_CP_STALLED_STAT2 = 0x%08X\n", r600_print_gpu_status_regs()
1571 dev_info(rdev->dev, " R_00867C_CP_BUSY_STAT = 0x%08X\n", r600_print_gpu_status_regs()
1573 dev_info(rdev->dev, " R_008680_CP_STAT = 0x%08X\n", r600_print_gpu_status_regs()
1575 dev_info(rdev->dev, " R_00D034_DMA_STATUS_REG = 0x%08X\n", r600_print_gpu_status_regs()
1579 static bool r600_is_display_hung(struct radeon_device *rdev) r600_is_display_hung() argument
1585 for (i = 0; i < rdev->num_crtc; i++) { r600_is_display_hung()
1593 for (i = 0; i < rdev->num_crtc; i++) { r600_is_display_hung()
1608 u32 r600_gpu_check_soft_reset(struct radeon_device *rdev) r600_gpu_check_soft_reset() argument
1615 if (rdev->family >= CHIP_RV770) { r600_gpu_check_soft_reset()
1665 if (r600_is_display_hung(rdev)) r600_gpu_check_soft_reset()
1677 static void r600_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask) r600_gpu_soft_reset() argument
1686 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask); r600_gpu_soft_reset()
1688 r600_print_gpu_status_regs(rdev); r600_gpu_soft_reset()
1691 if (rdev->family >= CHIP_RV770) r600_gpu_soft_reset()
1708 rv515_mc_stop(rdev, &save); r600_gpu_soft_reset()
1709 if (r600_mc_wait_for_idle(rdev)) { r600_gpu_soft_reset()
1710 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); r600_gpu_soft_reset()
1714 if (rdev->family >= CHIP_RV770) r600_gpu_soft_reset()
1750 if (rdev->family >= CHIP_RV770) r600_gpu_soft_reset()
1768 if (!(rdev->flags & RADEON_IS_IGP)) { r600_gpu_soft_reset()
1779 dev_info(rdev->dev, "R_008020_GRBM_SOFT_RESET=0x%08X\n", tmp); r600_gpu_soft_reset()
1793 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); r600_gpu_soft_reset()
1807 rv515_mc_resume(rdev, &save); r600_gpu_soft_reset()
1810 r600_print_gpu_status_regs(rdev); r600_gpu_soft_reset()
1813 static void r600_gpu_pci_config_reset(struct radeon_device *rdev) r600_gpu_pci_config_reset() argument
1818 dev_info(rdev->dev, "GPU pci config reset\n"); r600_gpu_pci_config_reset()
1823 if (rdev->family >= CHIP_RV770) r600_gpu_pci_config_reset()
1839 if (rdev->family >= CHIP_RV770) r600_gpu_pci_config_reset()
1840 rv770_set_clk_bypass_mode(rdev); r600_gpu_pci_config_reset()
1842 pci_clear_master(rdev->pdev); r600_gpu_pci_config_reset()
1844 rv515_mc_stop(rdev, &save); r600_gpu_pci_config_reset()
1845 if (r600_mc_wait_for_idle(rdev)) { r600_gpu_pci_config_reset()
1846 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); r600_gpu_pci_config_reset()
1857 radeon_pci_config_reset(rdev); r600_gpu_pci_config_reset()
1867 for (i = 0; i < rdev->usec_timeout; i++) { r600_gpu_pci_config_reset()
1874 int r600_asic_reset(struct radeon_device *rdev) r600_asic_reset() argument
1878 reset_mask = r600_gpu_check_soft_reset(rdev); r600_asic_reset()
1881 r600_set_bios_scratch_engine_hung(rdev, true); r600_asic_reset()
1884 r600_gpu_soft_reset(rdev, reset_mask); r600_asic_reset()
1886 reset_mask = r600_gpu_check_soft_reset(rdev); r600_asic_reset()
1890 r600_gpu_pci_config_reset(rdev); r600_asic_reset()
1892 reset_mask = r600_gpu_check_soft_reset(rdev); r600_asic_reset()
1895 r600_set_bios_scratch_engine_hung(rdev, false); r600_asic_reset()
1903 * @rdev: radeon_device pointer
1909 bool r600_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) r600_gfx_is_lockup() argument
1911 u32 reset_mask = r600_gpu_check_soft_reset(rdev); r600_gfx_is_lockup()
1916 radeon_ring_lockup_update(rdev, ring); r600_gfx_is_lockup()
1919 return radeon_ring_test_lockup(rdev, ring); r600_gfx_is_lockup()
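r600_gfx_is_lockup() declares a lockup only when the reset mask shows GFX blocks busy and the ring has made no progress since the last check; if either condition fails it refreshes the bookkeeping and returns false. The shape of that watchdog in isolation (timestamps stand in for radeon_ring_lockup_update()/radeon_ring_test_lockup(); names are hypothetical):

#include <stdbool.h>
#include <stdio.h>
#include <time.h>

struct ring_watch {
    unsigned last_rptr;
    time_t last_progress;
};

/* Returns true only if the engine looks busy AND the read pointer has
 * been stuck for more than `timeout` seconds. */
static bool is_lockup(struct ring_watch *w, unsigned rptr,
                      bool engine_busy, double timeout)
{
    time_t now = time(NULL);

    if (!engine_busy || rptr != w->last_rptr) {
        w->last_rptr = rptr;        /* progress: reset the watchdog */
        w->last_progress = now;
        return false;
    }
    return difftime(now, w->last_progress) > timeout;
}

int main(void)
{
    struct ring_watch w = { .last_rptr = 0, .last_progress = time(NULL) };

    printf("%d\n", is_lockup(&w, 4, true, 10.0));  /* 0: rptr moved */
    printf("%d\n", is_lockup(&w, 4, true, 10.0));  /* 0: stuck, but < 10s */
    return 0;
}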
1922 u32 r6xx_remap_render_backend(struct radeon_device *rdev, r6xx_remap_render_backend() argument
1946 if (rdev->family <= CHIP_RV740) { r6xx_remap_render_backend()
1977 static void r600_gpu_init(struct radeon_device *rdev) r600_gpu_init() argument
1992 rdev->config.r600.tiling_group_size = 256; r600_gpu_init()
1993 switch (rdev->family) { r600_gpu_init()
1995 rdev->config.r600.max_pipes = 4; r600_gpu_init()
1996 rdev->config.r600.max_tile_pipes = 8; r600_gpu_init()
1997 rdev->config.r600.max_simds = 4; r600_gpu_init()
1998 rdev->config.r600.max_backends = 4; r600_gpu_init()
1999 rdev->config.r600.max_gprs = 256; r600_gpu_init()
2000 rdev->config.r600.max_threads = 192; r600_gpu_init()
2001 rdev->config.r600.max_stack_entries = 256; r600_gpu_init()
2002 rdev->config.r600.max_hw_contexts = 8; r600_gpu_init()
2003 rdev->config.r600.max_gs_threads = 16; r600_gpu_init()
2004 rdev->config.r600.sx_max_export_size = 128; r600_gpu_init()
2005 rdev->config.r600.sx_max_export_pos_size = 16; r600_gpu_init()
2006 rdev->config.r600.sx_max_export_smx_size = 128; r600_gpu_init()
2007 rdev->config.r600.sq_num_cf_insts = 2; r600_gpu_init()
2011 rdev->config.r600.max_pipes = 2; r600_gpu_init()
2012 rdev->config.r600.max_tile_pipes = 2; r600_gpu_init()
2013 rdev->config.r600.max_simds = 3; r600_gpu_init()
2014 rdev->config.r600.max_backends = 1; r600_gpu_init()
2015 rdev->config.r600.max_gprs = 128; r600_gpu_init()
2016 rdev->config.r600.max_threads = 192; r600_gpu_init()
2017 rdev->config.r600.max_stack_entries = 128; r600_gpu_init()
2018 rdev->config.r600.max_hw_contexts = 8; r600_gpu_init()
2019 rdev->config.r600.max_gs_threads = 4; r600_gpu_init()
2020 rdev->config.r600.sx_max_export_size = 128; r600_gpu_init()
2021 rdev->config.r600.sx_max_export_pos_size = 16; r600_gpu_init()
2022 rdev->config.r600.sx_max_export_smx_size = 128; r600_gpu_init()
2023 rdev->config.r600.sq_num_cf_insts = 2; r600_gpu_init()
2029 rdev->config.r600.max_pipes = 1; r600_gpu_init()
2030 rdev->config.r600.max_tile_pipes = 1; r600_gpu_init()
2031 rdev->config.r600.max_simds = 2; r600_gpu_init()
2032 rdev->config.r600.max_backends = 1; r600_gpu_init()
2033 rdev->config.r600.max_gprs = 128; r600_gpu_init()
2034 rdev->config.r600.max_threads = 192; r600_gpu_init()
2035 rdev->config.r600.max_stack_entries = 128; r600_gpu_init()
2036 rdev->config.r600.max_hw_contexts = 4; r600_gpu_init()
2037 rdev->config.r600.max_gs_threads = 4; r600_gpu_init()
2038 rdev->config.r600.sx_max_export_size = 128; r600_gpu_init()
2039 rdev->config.r600.sx_max_export_pos_size = 16; r600_gpu_init()
2040 rdev->config.r600.sx_max_export_smx_size = 128; r600_gpu_init()
2041 rdev->config.r600.sq_num_cf_insts = 1; r600_gpu_init()
2044 rdev->config.r600.max_pipes = 4; r600_gpu_init()
2045 rdev->config.r600.max_tile_pipes = 4; r600_gpu_init()
2046 rdev->config.r600.max_simds = 4; r600_gpu_init()
2047 rdev->config.r600.max_backends = 4; r600_gpu_init()
2048 rdev->config.r600.max_gprs = 192; r600_gpu_init()
2049 rdev->config.r600.max_threads = 192; r600_gpu_init()
2050 rdev->config.r600.max_stack_entries = 256; r600_gpu_init()
2051 rdev->config.r600.max_hw_contexts = 8; r600_gpu_init()
2052 rdev->config.r600.max_gs_threads = 16; r600_gpu_init()
2053 rdev->config.r600.sx_max_export_size = 128; r600_gpu_init()
2054 rdev->config.r600.sx_max_export_pos_size = 16; r600_gpu_init()
2055 rdev->config.r600.sx_max_export_smx_size = 128; r600_gpu_init()
2056 rdev->config.r600.sq_num_cf_insts = 2; r600_gpu_init()
2076 switch (rdev->config.r600.max_tile_pipes) { r600_gpu_init()
2092 rdev->config.r600.tiling_npipes = rdev->config.r600.max_tile_pipes; r600_gpu_init()
2093 rdev->config.r600.tiling_nbanks = 4 << ((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT); r600_gpu_init()
2108 tmp = rdev->config.r600.max_simds - r600_gpu_init()
2110 rdev->config.r600.active_simds = tmp; r600_gpu_init()
2114 for (i = 0; i < rdev->config.r600.max_backends; i++) r600_gpu_init()
2118 for (i = 0; i < rdev->config.r600.max_backends; i++) r600_gpu_init()
2122 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.r600.max_backends, r600_gpu_init()
2125 rdev->config.r600.backend_map = tmp; r600_gpu_init()
2127 rdev->config.r600.tile_config = tiling_config; r600_gpu_init()
2144 if (rdev->family == CHIP_RV670) r600_gpu_init()
2149 if (rdev->family > CHIP_R600) r600_gpu_init()
2153 if (((rdev->family) == CHIP_R600) || r600_gpu_init()
2154 ((rdev->family) == CHIP_RV630) || r600_gpu_init()
2155 ((rdev->family) == CHIP_RV610) || r600_gpu_init()
2156 ((rdev->family) == CHIP_RV620) || r600_gpu_init()
2157 ((rdev->family) == CHIP_RS780) || r600_gpu_init()
2158 ((rdev->family) == CHIP_RS880)) { r600_gpu_init()
2173 if (((rdev->family) == CHIP_RV610) || r600_gpu_init()
2174 ((rdev->family) == CHIP_RV620) || r600_gpu_init()
2175 ((rdev->family) == CHIP_RS780) || r600_gpu_init()
2176 ((rdev->family) == CHIP_RS880)) { r600_gpu_init()
2181 } else if (((rdev->family) == CHIP_R600) || r600_gpu_init()
2182 ((rdev->family) == CHIP_RV630)) { r600_gpu_init()
2203 if ((rdev->family) == CHIP_R600) { r600_gpu_init()
2217 } else if (((rdev->family) == CHIP_RV610) || r600_gpu_init()
2218 ((rdev->family) == CHIP_RV620) || r600_gpu_init()
2219 ((rdev->family) == CHIP_RS780) || r600_gpu_init()
2220 ((rdev->family) == CHIP_RS880)) { r600_gpu_init()
2237 } else if (((rdev->family) == CHIP_RV630) || r600_gpu_init()
2238 ((rdev->family) == CHIP_RV635)) { r600_gpu_init()
2252 } else if ((rdev->family) == CHIP_RV670) { r600_gpu_init()
2275 if (((rdev->family) == CHIP_RV610) || r600_gpu_init()
2276 ((rdev->family) == CHIP_RV620) || r600_gpu_init()
2277 ((rdev->family) == CHIP_RS780) || r600_gpu_init()
2278 ((rdev->family) == CHIP_RS880)) { r600_gpu_init()
2301 tmp = rdev->config.r600.max_pipes * 16; r600_gpu_init()
2302 switch (rdev->family) { r600_gpu_init()
2345 switch (rdev->family) { r600_gpu_init()
2383 u32 r600_pciep_rreg(struct radeon_device *rdev, u32 reg) r600_pciep_rreg() argument
2388 spin_lock_irqsave(&rdev->pciep_idx_lock, flags); r600_pciep_rreg()
2392 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags); r600_pciep_rreg()
2396 void r600_pciep_wreg(struct radeon_device *rdev, u32 reg, u32 v) r600_pciep_wreg() argument
2400 spin_lock_irqsave(&rdev->pciep_idx_lock, flags); r600_pciep_wreg()
2405 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags); r600_pciep_wreg()
2411 void r600_cp_stop(struct radeon_device *rdev) r600_cp_stop() argument
2413 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) r600_cp_stop()
2414 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size); r600_cp_stop()
2417 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; r600_cp_stop()
2420 int r600_init_microcode(struct radeon_device *rdev) r600_init_microcode() argument
2431 switch (rdev->family) { r600_init_microcode()
2525 if (rdev->family >= CHIP_CEDAR) { r600_init_microcode()
2529 } else if (rdev->family >= CHIP_RV770) { r600_init_microcode()
2542 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev); r600_init_microcode()
2545 if (rdev->pfp_fw->size != pfp_req_size) { r600_init_microcode()
2548 rdev->pfp_fw->size, fw_name); r600_init_microcode()
2554 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); r600_init_microcode()
2557 if (rdev->me_fw->size != me_req_size) { r600_init_microcode()
2560 rdev->me_fw->size, fw_name); r600_init_microcode()
2565 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev); r600_init_microcode()
2568 if (rdev->rlc_fw->size != rlc_req_size) { r600_init_microcode()
2571 rdev->rlc_fw->size, fw_name); r600_init_microcode()
2575 if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_HEMLOCK)) { r600_init_microcode()
2577 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev); r600_init_microcode()
2582 release_firmware(rdev->smc_fw); r600_init_microcode()
2583 rdev->smc_fw = NULL; r600_init_microcode()
2585 } else if (rdev->smc_fw->size != smc_req_size) { r600_init_microcode()
2588 rdev->smc_fw->size, fw_name); r600_init_microcode()
2599 release_firmware(rdev->pfp_fw); r600_init_microcode()
2600 rdev->pfp_fw = NULL; r600_init_microcode()
2601 release_firmware(rdev->me_fw); r600_init_microcode()
2602 rdev->me_fw = NULL; r600_init_microcode()
2603 release_firmware(rdev->rlc_fw); r600_init_microcode()
2604 rdev->rlc_fw = NULL; r600_init_microcode()
2605 release_firmware(rdev->smc_fw); r600_init_microcode()
2606 rdev->smc_fw = NULL; r600_init_microcode()
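r600_init_microcode() requests each blob, checks its size against what the chip family expects, and releases everything already loaded on the error path. A userspace analogue of that request-then-validate step (file name and expected size are placeholders, not the driver's real pfp_req_size):

#include <stdio.h>
#include <stdlib.h>

/* Load a file and insist on an exact size, like the pfp/me/rlc checks. */
static int load_fw(const char *name, size_t expected, unsigned char **out)
{
    FILE *f = fopen(name, "rb");
    long sz;

    if (!f)
        return -1;
    fseek(f, 0, SEEK_END);
    sz = ftell(f);
    rewind(f);
    if (sz != (long)expected) {
        fprintf(stderr, "%s: wrong size %ld (expected %zu)\n",
                name, sz, expected);
        fclose(f);
        return -1;
    }
    *out = malloc(sz);
    if (!*out || fread(*out, 1, sz, f) != (size_t)sz) {
        free(*out);
        *out = NULL;
        fclose(f);
        return -1;
    }
    fclose(f);
    return 0;
}

int main(void)
{
    unsigned char *fw = NULL;

    /* "R600_pfp.bin" mirrors the driver's naming scheme; 2144 is an
     * illustrative size only. */
    if (load_fw("R600_pfp.bin", 2144, &fw) == 0) {
        puts("firmware ok");
        free(fw);
    }
    return 0;
}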
2611 u32 r600_gfx_get_rptr(struct radeon_device *rdev, r600_gfx_get_rptr() argument
2616 if (rdev->wb.enabled) r600_gfx_get_rptr()
2617 rptr = rdev->wb.wb[ring->rptr_offs/4]; r600_gfx_get_rptr()
2624 u32 r600_gfx_get_wptr(struct radeon_device *rdev, r600_gfx_get_wptr() argument
2634 void r600_gfx_set_wptr(struct radeon_device *rdev, r600_gfx_set_wptr() argument
2641 static int r600_cp_load_microcode(struct radeon_device *rdev) r600_cp_load_microcode() argument
2646 if (!rdev->me_fw || !rdev->pfp_fw) r600_cp_load_microcode()
2649 r600_cp_stop(rdev); r600_cp_load_microcode()
2665 fw_data = (const __be32 *)rdev->me_fw->data; r600_cp_load_microcode()
2671 fw_data = (const __be32 *)rdev->pfp_fw->data; r600_cp_load_microcode()
2683 int r600_cp_start(struct radeon_device *rdev) r600_cp_start() argument
2685 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; r600_cp_start()
2689 r = radeon_ring_lock(rdev, ring, 7); r600_cp_start()
2696 if (rdev->family >= CHIP_RV770) { r600_cp_start()
2698 radeon_ring_write(ring, rdev->config.rv770.max_hw_contexts - 1); r600_cp_start()
2701 radeon_ring_write(ring, rdev->config.r600.max_hw_contexts - 1); r600_cp_start()
2706 radeon_ring_unlock_commit(rdev, ring, false); r600_cp_start()
2713 int r600_cp_resume(struct radeon_device *rdev) r600_cp_resume() argument
2715 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; r600_cp_resume()
2746 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC)); r600_cp_resume()
2747 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF); r600_cp_resume()
2748 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF); r600_cp_resume()
2750 if (rdev->wb.enabled) r600_cp_resume()
2763 r600_cp_start(rdev); r600_cp_resume()
2765 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring); r600_cp_resume()
2771 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) r600_cp_resume()
2772 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size); r600_cp_resume()
2777 void r600_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size) r600_ring_init() argument
2788 if (radeon_ring_supports_scratch_reg(rdev, ring)) { r600_ring_init()
2789 r = radeon_scratch_get(rdev, &ring->rptr_save_reg); r600_ring_init()
2797 void r600_cp_fini(struct radeon_device *rdev) r600_cp_fini() argument
2799 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; r600_cp_fini()
2800 r600_cp_stop(rdev); r600_cp_fini()
2801 radeon_ring_fini(rdev, ring); r600_cp_fini()
2802 radeon_scratch_free(rdev, ring->rptr_save_reg); r600_cp_fini()
2808 void r600_scratch_init(struct radeon_device *rdev) r600_scratch_init() argument
2812 rdev->scratch.num_reg = 7; r600_scratch_init()
2813 rdev->scratch.reg_base = SCRATCH_REG0; r600_scratch_init()
2814 for (i = 0; i < rdev->scratch.num_reg; i++) { r600_scratch_init()
2815 rdev->scratch.free[i] = true; r600_scratch_init()
2816 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4); r600_scratch_init()
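r600_scratch_init() just seeds a small free list of scratch registers spaced 4 bytes apart; radeon_scratch_get()/radeon_scratch_free() then hand them out, as r600_ring_test() below demonstrates. The pattern in miniature (register count kept at 7 as above; the base address and helper names are invented):

#include <stdbool.h>
#include <stdio.h>

#define NUM_SCRATCH 7
#define SCRATCH_BASE 0x8500   /* invented; stands in for SCRATCH_REG0 */

static bool scratch_avail[NUM_SCRATCH];
static unsigned scratch_reg[NUM_SCRATCH];

static void scratch_init(void)
{
    for (int i = 0; i < NUM_SCRATCH; i++) {
        scratch_avail[i] = true;
        scratch_reg[i] = SCRATCH_BASE + i * 4;  /* 32-bit regs, 4 apart */
    }
}

static int scratch_get(unsigned *reg)
{
    for (int i = 0; i < NUM_SCRATCH; i++) {
        if (scratch_avail[i]) {
            scratch_avail[i] = false;
            *reg = scratch_reg[i];
            return 0;
        }
    }
    return -1;                  /* all in use */
}

static void scratch_put(unsigned reg)
{
    for (int i = 0; i < NUM_SCRATCH; i++)
        if (scratch_reg[i] == reg)
            scratch_avail[i] = true;
}

int main(void)
{
    unsigned reg;

    scratch_init();
    if (scratch_get(&reg) == 0) {
        printf("got scratch 0x%x\n", reg);
        scratch_put(reg);
    }
    return 0;
}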
2820 int r600_ring_test(struct radeon_device *rdev, struct radeon_ring *ring) r600_ring_test() argument
2827 r = radeon_scratch_get(rdev, &scratch); r600_ring_test()
2833 r = radeon_ring_lock(rdev, ring, 3); r600_ring_test()
2836 radeon_scratch_free(rdev, scratch); r600_ring_test()
2842 radeon_ring_unlock_commit(rdev, ring, false); r600_ring_test()
2843 for (i = 0; i < rdev->usec_timeout; i++) { r600_ring_test()
2849 if (i < rdev->usec_timeout) { r600_ring_test()
2856 radeon_scratch_free(rdev, scratch); r600_ring_test()
2864 void r600_fence_ring_emit(struct radeon_device *rdev, r600_fence_ring_emit() argument
2867 struct radeon_ring *ring = &rdev->ring[fence->ring]; r600_fence_ring_emit()
2871 if (rdev->family >= CHIP_RV770) r600_fence_ring_emit()
2874 if (rdev->wb.use_event) { r600_fence_ring_emit()
2875 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; r600_fence_ring_emit()
2904 radeon_ring_write(ring, ((rdev->fence_drv[fence->ring].scratch_reg - PACKET3_SET_CONFIG_REG_OFFSET) >> 2)); r600_fence_ring_emit()
2915 * @rdev: radeon_device pointer
2923 bool r600_semaphore_ring_emit(struct radeon_device *rdev, r600_semaphore_ring_emit() argument
2931 if (rdev->family < CHIP_CAYMAN) r600_semaphore_ring_emit()
2939 if (emit_wait && (rdev->family >= CHIP_CEDAR)) { r600_semaphore_ring_emit()
2951 * @rdev: radeon_device pointer
2961 struct radeon_fence *r600_copy_cpdma(struct radeon_device *rdev, r600_copy_cpdma() argument
2968 int ring_index = rdev->asic->copy.blit_ring_index; r600_copy_cpdma()
2969 struct radeon_ring *ring = &rdev->ring[ring_index]; r600_copy_cpdma()
2978 r = radeon_ring_lock(rdev, ring, num_loops * 6 + 24); r600_copy_cpdma()
2981 radeon_sync_free(rdev, &sync, NULL); r600_copy_cpdma()
2985 radeon_sync_resv(rdev, &sync, resv, false); r600_copy_cpdma()
2986 radeon_sync_rings(rdev, &sync, ring->idx); r600_copy_cpdma()
3012 r = radeon_fence_emit(rdev, &fence, ring->idx); r600_copy_cpdma()
3014 radeon_ring_unlock_undo(rdev, ring); r600_copy_cpdma()
3015 radeon_sync_free(rdev, &sync, NULL); r600_copy_cpdma()
3019 radeon_ring_unlock_commit(rdev, ring, false); r600_copy_cpdma()
3020 radeon_sync_free(rdev, &sync, fence); r600_copy_cpdma()
3025 int r600_set_surface_reg(struct radeon_device *rdev, int reg, r600_set_surface_reg() argument
3033 void r600_clear_surface_reg(struct radeon_device *rdev, int reg) r600_clear_surface_reg() argument
3038 static int r600_startup(struct radeon_device *rdev) r600_startup() argument
3044 r600_pcie_gen2_enable(rdev); r600_startup()
3047 r = r600_vram_scratch_init(rdev); r600_startup()
3051 r600_mc_program(rdev); r600_startup()
3053 if (rdev->flags & RADEON_IS_AGP) { r600_startup()
3054 r600_agp_enable(rdev); r600_startup()
3056 r = r600_pcie_gart_enable(rdev); r600_startup()
3060 r600_gpu_init(rdev); r600_startup()
3063 r = radeon_wb_init(rdev); r600_startup()
3067 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); r600_startup()
3069 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); r600_startup()
3073 if (rdev->has_uvd) { r600_startup()
3074 r = uvd_v1_0_resume(rdev); r600_startup()
3076 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX); r600_startup()
3078 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r); r600_startup()
3082 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; r600_startup()
3086 if (!rdev->irq.installed) { r600_startup()
3087 r = radeon_irq_kms_init(rdev); r600_startup()
3092 r = r600_irq_init(rdev); r600_startup()
3095 radeon_irq_kms_fini(rdev); r600_startup()
3098 r600_irq_set(rdev); r600_startup()
3100 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; r600_startup()
3101 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, r600_startup()
3106 r = r600_cp_load_microcode(rdev); r600_startup()
3109 r = r600_cp_resume(rdev); r600_startup()
3113 if (rdev->has_uvd) { r600_startup()
3114 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; r600_startup()
3116 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, r600_startup()
3119 r = uvd_v1_0_init(rdev); r600_startup()
3125 r = radeon_ib_pool_init(rdev); r600_startup()
3127 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); r600_startup()
3131 r = radeon_audio_init(rdev); r600_startup()
3140 void r600_vga_set_state(struct radeon_device *rdev, bool state) r600_vga_set_state() argument
3154 int r600_resume(struct radeon_device *rdev) r600_resume() argument
3163 atom_asic_init(rdev->mode_info.atom_context); r600_resume()
3165 if (rdev->pm.pm_method == PM_METHOD_DPM) r600_resume()
3166 radeon_pm_resume(rdev); r600_resume()
3168 rdev->accel_working = true; r600_resume()
3169 r = r600_startup(rdev); r600_resume()
3172 rdev->accel_working = false; r600_resume()
3179 int r600_suspend(struct radeon_device *rdev) r600_suspend() argument
3181 radeon_pm_suspend(rdev); r600_suspend()
3182 radeon_audio_fini(rdev); r600_suspend()
3183 r600_cp_stop(rdev); r600_suspend()
3184 if (rdev->has_uvd) { r600_suspend()
3185 uvd_v1_0_fini(rdev); r600_suspend()
3186 radeon_uvd_suspend(rdev); r600_suspend()
3188 r600_irq_suspend(rdev); r600_suspend()
3189 radeon_wb_disable(rdev); r600_suspend()
3190 r600_pcie_gart_disable(rdev); r600_suspend()
3201 int r600_init(struct radeon_device *rdev) r600_init() argument
3205 if (r600_debugfs_mc_info_init(rdev)) { r600_init()
3209 if (!radeon_get_bios(rdev)) { r600_init()
3210 if (ASIC_IS_AVIVO(rdev)) r600_init()
3214 if (!rdev->is_atom_bios) { r600_init()
3215 dev_err(rdev->dev, "Expecting atombios for R600 GPU\n"); r600_init()
3218 r = radeon_atombios_init(rdev); r600_init()
3222 if (!radeon_card_posted(rdev)) { r600_init()
3223 if (!rdev->bios) { r600_init()
3224 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n"); r600_init()
3228 atom_asic_init(rdev->mode_info.atom_context); r600_init()
3231 r600_scratch_init(rdev); r600_init()
3233 radeon_surface_init(rdev); r600_init()
3235 radeon_get_clock_info(rdev->ddev); r600_init()
3237 r = radeon_fence_driver_init(rdev); r600_init()
3240 if (rdev->flags & RADEON_IS_AGP) { r600_init()
3241 r = radeon_agp_init(rdev); r600_init()
3243 radeon_agp_disable(rdev); r600_init()
3245 r = r600_mc_init(rdev); r600_init()
3249 r = radeon_bo_init(rdev); r600_init()
3253 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) { r600_init()
3254 r = r600_init_microcode(rdev); r600_init()
3262 radeon_pm_init(rdev); r600_init()
3264 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL; r600_init()
3265 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024); r600_init()
3267 if (rdev->has_uvd) { r600_init()
3268 r = radeon_uvd_init(rdev); r600_init()
3270 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL; r600_init()
3271 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096); r600_init()
3275 rdev->ih.ring_obj = NULL; r600_init()
3276 r600_ih_ring_init(rdev, 64 * 1024); r600_init()
3278 r = r600_pcie_gart_init(rdev); r600_init()
3282 rdev->accel_working = true; r600_init()
3283 r = r600_startup(rdev); r600_init()
3285 dev_err(rdev->dev, "disabling GPU acceleration\n"); r600_init()
3286 r600_cp_fini(rdev); r600_init()
3287 r600_irq_fini(rdev); r600_init()
3288 radeon_wb_fini(rdev); r600_init()
3289 radeon_ib_pool_fini(rdev); r600_init()
3290 radeon_irq_kms_fini(rdev); r600_init()
3291 r600_pcie_gart_fini(rdev); r600_init()
3292 rdev->accel_working = false; r600_init()
3298 void r600_fini(struct radeon_device *rdev) r600_fini() argument
3300 radeon_pm_fini(rdev); r600_fini()
3301 radeon_audio_fini(rdev); r600_fini()
3302 r600_cp_fini(rdev); r600_fini()
3303 r600_irq_fini(rdev); r600_fini()
3304 if (rdev->has_uvd) { r600_fini()
3305 uvd_v1_0_fini(rdev); r600_fini()
3306 radeon_uvd_fini(rdev); r600_fini()
3308 radeon_wb_fini(rdev); r600_fini()
3309 radeon_ib_pool_fini(rdev); r600_fini()
3310 radeon_irq_kms_fini(rdev); r600_fini()
3311 r600_pcie_gart_fini(rdev); r600_fini()
3312 r600_vram_scratch_fini(rdev); r600_fini()
3313 radeon_agp_fini(rdev); r600_fini()
3314 radeon_gem_fini(rdev); r600_fini()
3315 radeon_fence_driver_fini(rdev); r600_fini()
3316 radeon_bo_fini(rdev); r600_fini()
3317 radeon_atombios_fini(rdev); r600_fini()
3318 kfree(rdev->bios); r600_fini()
3319 rdev->bios = NULL; r600_fini()
3326 void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) r600_ring_ib_execute() argument
3328 struct radeon_ring *ring = &rdev->ring[ib->ring]; r600_ring_ib_execute()
3337 } else if (rdev->wb.enabled) { r600_ring_ib_execute()
3356 int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring) r600_ib_test() argument
3364 r = radeon_scratch_get(rdev, &scratch); r600_ib_test()
3370 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256); r600_ib_test()
3379 r = radeon_ib_schedule(rdev, &ib, NULL, false); r600_ib_test()
3389 for (i = 0; i < rdev->usec_timeout; i++) { r600_ib_test()
3395 if (i < rdev->usec_timeout) { r600_ib_test()
3403 radeon_ib_free(rdev, &ib); r600_ib_test()
3405 radeon_scratch_free(rdev, scratch); r600_ib_test()
3420 void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size) r600_ih_ring_init() argument
3427 rdev->ih.ring_size = ring_size; r600_ih_ring_init()
3428 rdev->ih.ptr_mask = rdev->ih.ring_size - 1; r600_ih_ring_init()
3429 rdev->ih.rptr = 0; r600_ih_ring_init()
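r600_ih_ring_init() keeps the interrupt ring a power of two so that rptr arithmetic is a cheap AND with ptr_mask instead of a modulo. The wraparound in isolation (a minimal sketch; 16 bytes per IH vector matches the hardware's 4-dword entries):

#include <stdio.h>

int main(void)
{
    unsigned ring_size = 64 * 1024;          /* must be a power of two */
    unsigned ptr_mask = ring_size - 1;
    unsigned rptr = ring_size - 16;

    /* Advancing one 16-byte vector past the end wraps back to 0. */
    rptr = (rptr + 16) & ptr_mask;
    printf("rptr after wrap: %u\n", rptr);   /* 0 */
    return 0;
}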
3432 int r600_ih_ring_alloc(struct radeon_device *rdev) r600_ih_ring_alloc() argument
3437 if (rdev->ih.ring_obj == NULL) { r600_ih_ring_alloc()
3438 r = radeon_bo_create(rdev, rdev->ih.ring_size, r600_ih_ring_alloc()
3441 NULL, NULL, &rdev->ih.ring_obj); r600_ih_ring_alloc()
3446 r = radeon_bo_reserve(rdev->ih.ring_obj, false); r600_ih_ring_alloc()
3449 r = radeon_bo_pin(rdev->ih.ring_obj, r600_ih_ring_alloc()
3451 &rdev->ih.gpu_addr); r600_ih_ring_alloc()
3453 radeon_bo_unreserve(rdev->ih.ring_obj); r600_ih_ring_alloc()
3457 r = radeon_bo_kmap(rdev->ih.ring_obj, r600_ih_ring_alloc()
3458 (void **)&rdev->ih.ring); r600_ih_ring_alloc()
3459 radeon_bo_unreserve(rdev->ih.ring_obj); r600_ih_ring_alloc()
3468 void r600_ih_ring_fini(struct radeon_device *rdev) r600_ih_ring_fini() argument
3471 if (rdev->ih.ring_obj) { r600_ih_ring_fini()
3472 r = radeon_bo_reserve(rdev->ih.ring_obj, false); r600_ih_ring_fini()
3474 radeon_bo_kunmap(rdev->ih.ring_obj); r600_ih_ring_fini()
3475 radeon_bo_unpin(rdev->ih.ring_obj); r600_ih_ring_fini()
3476 radeon_bo_unreserve(rdev->ih.ring_obj); r600_ih_ring_fini()
3478 radeon_bo_unref(&rdev->ih.ring_obj); r600_ih_ring_fini()
3479 rdev->ih.ring = NULL; r600_ih_ring_fini()
3480 rdev->ih.ring_obj = NULL; r600_ih_ring_fini()
3484 void r600_rlc_stop(struct radeon_device *rdev) r600_rlc_stop() argument
3487 if ((rdev->family >= CHIP_RV770) && r600_rlc_stop()
3488 (rdev->family <= CHIP_RV740)) { r600_rlc_stop()
3500 static void r600_rlc_start(struct radeon_device *rdev) r600_rlc_start() argument
3505 static int r600_rlc_resume(struct radeon_device *rdev) r600_rlc_resume() argument
3510 if (!rdev->rlc_fw) r600_rlc_resume()
3513 r600_rlc_stop(rdev); r600_rlc_resume()
3525 fw_data = (const __be32 *)rdev->rlc_fw->data; r600_rlc_resume()
3526 if (rdev->family >= CHIP_RV770) { r600_rlc_resume()
3539 r600_rlc_start(rdev); r600_rlc_resume()
3544 static void r600_enable_interrupts(struct radeon_device *rdev) r600_enable_interrupts() argument
3553 rdev->ih.enabled = true; r600_enable_interrupts()
3556 void r600_disable_interrupts(struct radeon_device *rdev) r600_disable_interrupts() argument
3568 rdev->ih.enabled = false; r600_disable_interrupts()
3569 rdev->ih.rptr = 0; r600_disable_interrupts()
3572 static void r600_disable_interrupt_state(struct radeon_device *rdev) r600_disable_interrupt_state() argument
3583 if (ASIC_IS_DCE3(rdev)) { r600_disable_interrupt_state()
3594 if (ASIC_IS_DCE32(rdev)) { r600_disable_interrupt_state()
3625 int r600_irq_init(struct radeon_device *rdev) r600_irq_init() argument
3632 ret = r600_ih_ring_alloc(rdev); r600_irq_init()
3637 r600_disable_interrupts(rdev); r600_irq_init()
3640 if (rdev->family >= CHIP_CEDAR) r600_irq_init()
3641 ret = evergreen_rlc_resume(rdev); r600_irq_init()
3643 ret = r600_rlc_resume(rdev); r600_irq_init()
3645 r600_ih_ring_fini(rdev); r600_irq_init()
3651 WREG32(INTERRUPT_CNTL2, rdev->ih.gpu_addr >> 8); r600_irq_init()
3661 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8); r600_irq_init()
3662 rb_bufsz = order_base_2(rdev->ih.ring_size / 4); r600_irq_init()
3668 if (rdev->wb.enabled) r600_irq_init()
3672 WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC); r600_irq_init()
3673 WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF); r600_irq_init()
3684 if (rdev->msi_enabled) r600_irq_init()
3689 if (rdev->family >= CHIP_CEDAR) r600_irq_init()
3690 evergreen_disable_interrupt_state(rdev); r600_irq_init()
3692 r600_disable_interrupt_state(rdev); r600_irq_init()
3695 pci_set_master(rdev->pdev); r600_irq_init()
3698 r600_enable_interrupts(rdev); r600_irq_init()
3703 void r600_irq_suspend(struct radeon_device *rdev) r600_irq_suspend() argument
3705 r600_irq_disable(rdev); r600_irq_suspend()
3706 r600_rlc_stop(rdev); r600_irq_suspend()
3709 void r600_irq_fini(struct radeon_device *rdev) r600_irq_fini() argument
3711 r600_irq_suspend(rdev); r600_irq_fini()
3712 r600_ih_ring_fini(rdev); r600_irq_fini()
3715 int r600_irq_set(struct radeon_device *rdev) r600_irq_set() argument
3725 if (!rdev->irq.installed) { r600_irq_set()
3730 if (!rdev->ih.enabled) { r600_irq_set()
3731 r600_disable_interrupts(rdev); r600_irq_set()
3733 r600_disable_interrupt_state(rdev); r600_irq_set()
3737 if (ASIC_IS_DCE3(rdev)) { r600_irq_set()
3742 if (ASIC_IS_DCE32(rdev)) { r600_irq_set()
3761 if ((rdev->family > CHIP_R600) && (rdev->family < CHIP_RV770)) { r600_irq_set()
3764 } else if (rdev->family >= CHIP_RV770) { r600_irq_set()
3768 if (rdev->irq.dpm_thermal) { r600_irq_set()
3773 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) { r600_irq_set()
3779 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) { r600_irq_set()
3784 if (rdev->irq.crtc_vblank_int[0] || r600_irq_set()
3785 atomic_read(&rdev->irq.pflip[0])) { r600_irq_set()
3789 if (rdev->irq.crtc_vblank_int[1] || r600_irq_set()
3790 atomic_read(&rdev->irq.pflip[1])) { r600_irq_set()
3794 if (rdev->irq.hpd[0]) { r600_irq_set()
3798 if (rdev->irq.hpd[1]) { r600_irq_set()
3802 if (rdev->irq.hpd[2]) { r600_irq_set()
3806 if (rdev->irq.hpd[3]) { r600_irq_set()
3810 if (rdev->irq.hpd[4]) { r600_irq_set()
3814 if (rdev->irq.hpd[5]) { r600_irq_set()
3818 if (rdev->irq.afmt[0]) { r600_irq_set()
3822 if (rdev->irq.afmt[1]) { r600_irq_set()
3833 if (ASIC_IS_DCE3(rdev)) { r600_irq_set()
3838 if (ASIC_IS_DCE32(rdev)) { r600_irq_set()
3854 if ((rdev->family > CHIP_R600) && (rdev->family < CHIP_RV770)) { r600_irq_set()
3856 } else if (rdev->family >= CHIP_RV770) { r600_irq_set()
3866 static void r600_irq_ack(struct radeon_device *rdev) r600_irq_ack() argument
3870 if (ASIC_IS_DCE3(rdev)) { r600_irq_ack()
3871 rdev->irq.stat_regs.r600.disp_int = RREG32(DCE3_DISP_INTERRUPT_STATUS); r600_irq_ack()
3872 rdev->irq.stat_regs.r600.disp_int_cont = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE); r600_irq_ack()
3873 rdev->irq.stat_regs.r600.disp_int_cont2 = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE2); r600_irq_ack()
3874 if (ASIC_IS_DCE32(rdev)) { r600_irq_ack()
3875 rdev->irq.stat_regs.r600.hdmi0_status = RREG32(AFMT_STATUS + DCE3_HDMI_OFFSET0); r600_irq_ack()
3876 rdev->irq.stat_regs.r600.hdmi1_status = RREG32(AFMT_STATUS + DCE3_HDMI_OFFSET1); r600_irq_ack()
3878 rdev->irq.stat_regs.r600.hdmi0_status = RREG32(HDMI0_STATUS); r600_irq_ack()
3879 rdev->irq.stat_regs.r600.hdmi1_status = RREG32(DCE3_HDMI1_STATUS); r600_irq_ack()
3882 rdev->irq.stat_regs.r600.disp_int = RREG32(DISP_INTERRUPT_STATUS); r600_irq_ack()
3883 rdev->irq.stat_regs.r600.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE); r600_irq_ack()
3884 rdev->irq.stat_regs.r600.disp_int_cont2 = 0; r600_irq_ack()
3885 rdev->irq.stat_regs.r600.hdmi0_status = RREG32(HDMI0_STATUS); r600_irq_ack()
3886 rdev->irq.stat_regs.r600.hdmi1_status = RREG32(HDMI1_STATUS); r600_irq_ack()
3888 rdev->irq.stat_regs.r600.d1grph_int = RREG32(D1GRPH_INTERRUPT_STATUS); r600_irq_ack()
3889 rdev->irq.stat_regs.r600.d2grph_int = RREG32(D2GRPH_INTERRUPT_STATUS); r600_irq_ack()
3891 if (rdev->irq.stat_regs.r600.d1grph_int & DxGRPH_PFLIP_INT_OCCURRED) r600_irq_ack()
3893 if (rdev->irq.stat_regs.r600.d2grph_int & DxGRPH_PFLIP_INT_OCCURRED) r600_irq_ack()
3895 if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VBLANK_INTERRUPT) r600_irq_ack()
3897 if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VLINE_INTERRUPT) r600_irq_ack()
3899 if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VBLANK_INTERRUPT) r600_irq_ack()
3901 if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VLINE_INTERRUPT) r600_irq_ack()
3903 if (rdev->irq.stat_regs.r600.disp_int & DC_HPD1_INTERRUPT) { r600_irq_ack()
3904 if (ASIC_IS_DCE3(rdev)) { r600_irq_ack()
3914 if (rdev->irq.stat_regs.r600.disp_int & DC_HPD2_INTERRUPT) { r600_irq_ack()
3915 if (ASIC_IS_DCE3(rdev)) { r600_irq_ack()
3925 if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD3_INTERRUPT) { r600_irq_ack()
3926 if (ASIC_IS_DCE3(rdev)) { r600_irq_ack()
3936 if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD4_INTERRUPT) { r600_irq_ack()
3941 if (ASIC_IS_DCE32(rdev)) { r600_irq_ack()
3942 if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD5_INTERRUPT) { r600_irq_ack()
3947 if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD6_INTERRUPT) { r600_irq_ack()
3952 if (rdev->irq.stat_regs.r600.hdmi0_status & AFMT_AZ_FORMAT_WTRIG) { r600_irq_ack()
3957 if (rdev->irq.stat_regs.r600.hdmi1_status & AFMT_AZ_FORMAT_WTRIG) { r600_irq_ack()
3963 if (rdev->irq.stat_regs.r600.hdmi0_status & HDMI0_AZ_FORMAT_WTRIG) { r600_irq_ack()
3968 if (rdev->irq.stat_regs.r600.hdmi1_status & HDMI0_AZ_FORMAT_WTRIG) { r600_irq_ack()
3969 if (ASIC_IS_DCE3(rdev)) { r600_irq_ack()
3982 void r600_irq_disable(struct radeon_device *rdev) r600_irq_disable() argument
3984 r600_disable_interrupts(rdev); r600_irq_disable()
3987 r600_irq_ack(rdev); r600_irq_disable()
3988 r600_disable_interrupt_state(rdev); r600_irq_disable()
3991 static u32 r600_get_ih_wptr(struct radeon_device *rdev) r600_get_ih_wptr() argument
3995 if (rdev->wb.enabled) r600_get_ih_wptr()
3996 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]); r600_get_ih_wptr()
4006 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n", r600_get_ih_wptr()
4007 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask); r600_get_ih_wptr()
4008 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask; r600_get_ih_wptr()
4013 return (wptr & rdev->ih.ptr_mask); r600_get_ih_wptr()
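r600_get_ih_wptr() prefers the writeback copy of the write pointer and, when the hardware flags an overflow, resynchronizes rptr to 16 bytes behind wptr so only the oldest, already-overwritten entries are dropped. A hedged sketch of that recovery arithmetic (the flag bit and value layout are assumed for illustration):

    #include <stdint.h>
    #include <stdio.h>

    #define IH_OVERFLOW (1u << 0)   /* assumed flag bit, for illustration only */

    /* On overflow, jump rptr to 16 bytes behind wptr, dropping the oldest entries. */
    static uint32_t get_ih_wptr(uint32_t raw_wptr, uint32_t *rptr, uint32_t ptr_mask)
    {
        if (raw_wptr & IH_OVERFLOW) {
            uint32_t wptr = raw_wptr & ~IH_OVERFLOW;

            fprintf(stderr, "IH ring overflow (0x%08X, 0x%08X)\n",
                    (unsigned)wptr, (unsigned)*rptr);
            *rptr = (wptr + 16) & ptr_mask;
            return wptr & ptr_mask;
        }
        return raw_wptr & ptr_mask;
    }

    int main(void)
    {
        uint32_t rptr = 0, ptr_mask = 64 * 1024 - 1;
        uint32_t wptr = get_ih_wptr((100u << 4) | IH_OVERFLOW, &rptr, ptr_mask);

        printf("wptr=%u rptr=%u\n", (unsigned)wptr, (unsigned)rptr);
        return 0;
    }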
4046 int r600_irq_process(struct radeon_device *rdev) r600_irq_process() argument
4056 if (!rdev->ih.enabled || rdev->shutdown) r600_irq_process()
4060 if (!rdev->msi_enabled) r600_irq_process()
4063 wptr = r600_get_ih_wptr(rdev); r600_irq_process()
4067 if (atomic_xchg(&rdev->ih.lock, 1)) r600_irq_process()
4070 rptr = rdev->ih.rptr; r600_irq_process()
4077 r600_irq_ack(rdev); r600_irq_process()
4082 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff; r600_irq_process()
4083 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff; r600_irq_process()
4089 if (!(rdev->irq.stat_regs.r600.disp_int & LB_D1_VBLANK_INTERRUPT)) r600_irq_process()
4092 if (rdev->irq.crtc_vblank_int[0]) { r600_irq_process()
4093 drm_handle_vblank(rdev->ddev, 0); r600_irq_process()
4094 rdev->pm.vblank_sync = true; r600_irq_process()
4095 wake_up(&rdev->irq.vblank_queue); r600_irq_process()
4097 if (atomic_read(&rdev->irq.pflip[0])) r600_irq_process()
4098 radeon_crtc_handle_vblank(rdev, 0); r600_irq_process()
4099 rdev->irq.stat_regs.r600.disp_int &= ~LB_D1_VBLANK_INTERRUPT; r600_irq_process()
4104 if (!(rdev->irq.stat_regs.r600.disp_int & LB_D1_VLINE_INTERRUPT)) r600_irq_process()
4107 rdev->irq.stat_regs.r600.disp_int &= ~LB_D1_VLINE_INTERRUPT; r600_irq_process()
4119 if (!(rdev->irq.stat_regs.r600.disp_int & LB_D2_VBLANK_INTERRUPT)) r600_irq_process()
4122 if (rdev->irq.crtc_vblank_int[1]) { r600_irq_process()
4123 drm_handle_vblank(rdev->ddev, 1); r600_irq_process()
4124 rdev->pm.vblank_sync = true; r600_irq_process()
4125 wake_up(&rdev->irq.vblank_queue); r600_irq_process()
4127 if (atomic_read(&rdev->irq.pflip[1])) r600_irq_process()
4128 radeon_crtc_handle_vblank(rdev, 1); r600_irq_process()
4129 rdev->irq.stat_regs.r600.disp_int &= ~LB_D2_VBLANK_INTERRUPT; r600_irq_process()
4134 if (!(rdev->irq.stat_regs.r600.disp_int & LB_D2_VLINE_INTERRUPT)) r600_irq_process()
4137 rdev->irq.stat_regs.r600.disp_int &= ~LB_D2_VLINE_INTERRUPT; r600_irq_process()
4149 radeon_crtc_handle_flip(rdev, 0); r600_irq_process()
4154 radeon_crtc_handle_flip(rdev, 1); r600_irq_process()
4159 if (!(rdev->irq.stat_regs.r600.disp_int & DC_HPD1_INTERRUPT)) r600_irq_process()
4162 rdev->irq.stat_regs.r600.disp_int &= ~DC_HPD1_INTERRUPT; r600_irq_process()
4167 if (!(rdev->irq.stat_regs.r600.disp_int & DC_HPD2_INTERRUPT)) r600_irq_process()
4170 rdev->irq.stat_regs.r600.disp_int &= ~DC_HPD2_INTERRUPT; r600_irq_process()
4175 if (!(rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD3_INTERRUPT)) r600_irq_process()
4178 rdev->irq.stat_regs.r600.disp_int_cont &= ~DC_HPD3_INTERRUPT; r600_irq_process()
4183 if (!(rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD4_INTERRUPT)) r600_irq_process()
4186 rdev->irq.stat_regs.r600.disp_int_cont &= ~DC_HPD4_INTERRUPT; r600_irq_process()
4191 if (!(rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD5_INTERRUPT)) r600_irq_process()
4194 rdev->irq.stat_regs.r600.disp_int_cont2 &= ~DC_HPD5_INTERRUPT; r600_irq_process()
4199 if (!(rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD6_INTERRUPT)) r600_irq_process()
4202 rdev->irq.stat_regs.r600.disp_int_cont2 &= ~DC_HPD6_INTERRUPT; r600_irq_process()
4215 if (!(rdev->irq.stat_regs.r600.hdmi0_status & HDMI0_AZ_FORMAT_WTRIG)) r600_irq_process()
4218 rdev->irq.stat_regs.r600.hdmi0_status &= ~HDMI0_AZ_FORMAT_WTRIG; r600_irq_process()
4224 if (!(rdev->irq.stat_regs.r600.hdmi1_status & HDMI0_AZ_FORMAT_WTRIG)) r600_irq_process()
4227 rdev->irq.stat_regs.r600.hdmi1_status &= ~HDMI0_AZ_FORMAT_WTRIG; r600_irq_process()
4239 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX); r600_irq_process()
4245 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); r600_irq_process()
4249 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); r600_irq_process()
4253 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX); r600_irq_process()
4257 rdev->pm.dpm.thermal.high_to_low = false; r600_irq_process()
4262 rdev->pm.dpm.thermal.high_to_low = true; r600_irq_process()
4275 rptr &= rdev->ih.ptr_mask; r600_irq_process()
4279 schedule_delayed_work(&rdev->hotplug_work, 0); r600_irq_process()
4281 schedule_work(&rdev->audio_work); r600_irq_process()
4282 if (queue_thermal && rdev->pm.dpm_enabled) r600_irq_process()
4283 schedule_work(&rdev->pm.dpm.thermal.work); r600_irq_process()
4284 rdev->ih.rptr = rptr; r600_irq_process()
4285 atomic_set(&rdev->ih.lock, 0); r600_irq_process()
4288 wptr = r600_get_ih_wptr(rdev); r600_irq_process()
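r600_irq_process() drains the IH ring by walking rptr toward wptr, decoding a source id and source data out of each 16-byte entry and masking after every advance. The consume loop, reduced to a standalone sketch (the two-dword entry layout here is simplified from the real format):

    #include <stdint.h>
    #include <stdio.h>

    /* Simplified: each 16-byte entry holds src_id in dword 0, src_data in dword 1. */
    static void irq_process(const uint32_t *ring, uint32_t *rptr_io,
                            uint32_t wptr, uint32_t ptr_mask)
    {
        uint32_t rptr = *rptr_io;

        while (rptr != wptr) {
            uint32_t ring_index = (rptr & ptr_mask) / 4;
            uint32_t src_id   = ring[ring_index] & 0xff;
            uint32_t src_data = ring[ring_index + 1] & 0xfffffff;

            printf("irq: src_id=%u src_data=0x%x\n",
                   (unsigned)src_id, (unsigned)src_data);
            rptr += 16;         /* one entry consumed */
            rptr &= ptr_mask;   /* wrap with the power-of-two mask */
        }
        *rptr_io = rptr;
    }

    int main(void)
    {
        uint32_t ring[16] = { 19, 0xd001, 0, 0, 1, 7, 0, 0 };  /* two fake entries */
        uint32_t rptr = 0;

        irq_process(ring, &rptr, 32, sizeof(ring) - 1);
        return 0;
    }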
4304 struct radeon_device *rdev = dev->dev_private; r600_debugfs_mc_info() local
4306 DREG32_SYS(m, rdev, R_000E50_SRBM_STATUS); r600_debugfs_mc_info()
4307 DREG32_SYS(m, rdev, VM_L2_STATUS); r600_debugfs_mc_info()
4316 int r600_debugfs_mc_info_init(struct radeon_device *rdev) r600_debugfs_mc_info_init() argument
4319 return radeon_debugfs_add_files(rdev, r600_mc_info_list, ARRAY_SIZE(r600_mc_info_list)); r600_debugfs_mc_info_init()
4327 * @rdev: radeon device structure r600_mmio_hdp_flush()
4334 void r600_mmio_hdp_flush(struct radeon_device *rdev) r600_mmio_hdp_flush() argument
4341 if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_RV740) && r600_mmio_hdp_flush()
4342 rdev->vram_scratch.ptr && !(rdev->flags & RADEON_IS_AGP)) { r600_mmio_hdp_flush()
4343 void __iomem *ptr = (void *)rdev->vram_scratch.ptr; r600_mmio_hdp_flush()
4352 void r600_set_pcie_lanes(struct radeon_device *rdev, int lanes) r600_set_pcie_lanes() argument
4356 if (rdev->flags & RADEON_IS_IGP) r600_set_pcie_lanes()
4359 if (!(rdev->flags & RADEON_IS_PCIE)) r600_set_pcie_lanes()
4363 if (ASIC_IS_X2(rdev)) r600_set_pcie_lanes()
4366 radeon_gui_idle(rdev); r600_set_pcie_lanes()
4405 int r600_get_pcie_lanes(struct radeon_device *rdev) r600_get_pcie_lanes() argument
4409 if (rdev->flags & RADEON_IS_IGP) r600_get_pcie_lanes()
4412 if (!(rdev->flags & RADEON_IS_PCIE)) r600_get_pcie_lanes()
4416 if (ASIC_IS_X2(rdev)) r600_get_pcie_lanes()
4419 radeon_gui_idle(rdev); r600_get_pcie_lanes()
4442 static void r600_pcie_gen2_enable(struct radeon_device *rdev) r600_pcie_gen2_enable() argument
4450 if (rdev->flags & RADEON_IS_IGP) r600_pcie_gen2_enable()
4453 if (!(rdev->flags & RADEON_IS_PCIE)) r600_pcie_gen2_enable()
4457 if (ASIC_IS_X2(rdev)) r600_pcie_gen2_enable()
4461 if (rdev->family <= CHIP_R600) r600_pcie_gen2_enable()
4464 if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) && r600_pcie_gen2_enable()
4465 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT)) r600_pcie_gen2_enable()
4477 if ((rdev->family == CHIP_RV670) || r600_pcie_gen2_enable()
4478 (rdev->family == CHIP_RV620) || r600_pcie_gen2_enable()
4479 (rdev->family == CHIP_RV635)) { r600_pcie_gen2_enable()
4502 if ((rdev->family == CHIP_RV670) || r600_pcie_gen2_enable()
4503 (rdev->family == CHIP_RV620) || r600_pcie_gen2_enable()
4504 (rdev->family == CHIP_RV635)) { r600_pcie_gen2_enable()
4529 if ((rdev->family == CHIP_RV670) || r600_pcie_gen2_enable()
4530 (rdev->family == CHIP_RV620) || r600_pcie_gen2_enable()
4531 (rdev->family == CHIP_RV635)) { r600_pcie_gen2_enable()
4559 * @rdev: radeon_device pointer
4564 uint64_t r600_get_gpu_clock_counter(struct radeon_device *rdev) r600_get_gpu_clock_counter() argument
4568 mutex_lock(&rdev->gpu_clock_mutex); r600_get_gpu_clock_counter()
4572 mutex_unlock(&rdev->gpu_clock_mutex); r600_get_gpu_clock_counter()
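r600_get_gpu_clock_counter() serializes its two 32-bit register reads under gpu_clock_mutex so that both halves of the 64-bit counter come from one latched sample. A userspace analogue using a pthread mutex (the register values are faked; only the locking shape is the point):

    #include <pthread.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Stand-ins for the two 32-bit halves a real read would latch. */
    static uint32_t fake_lower = 0x89abcdefu, fake_upper = 0x01234567u;
    static pthread_mutex_t clock_mutex = PTHREAD_MUTEX_INITIALIZER;

    /* Serialize the two-register read so both halves come from one sample. */
    static uint64_t get_gpu_clock_counter(void)
    {
        uint64_t clock;

        pthread_mutex_lock(&clock_mutex);
        clock = (uint64_t)fake_lower | ((uint64_t)fake_upper << 32);
        pthread_mutex_unlock(&clock_mutex);
        return clock;
    }

    int main(void)
    {
        printf("counter = 0x%016llx\n",
               (unsigned long long)get_gpu_clock_counter());
        return 0;
    }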
H A Dradeon_device.c137 struct radeon_device *rdev = dev->dev_private; radeon_is_px() local
139 if (rdev->flags & RADEON_IS_PX) radeon_is_px()
144 static void radeon_device_handle_px_quirks(struct radeon_device *rdev) radeon_device_handle_px_quirks() argument
150 if (rdev->pdev->vendor == p->chip_vendor && radeon_device_handle_px_quirks()
151 rdev->pdev->device == p->chip_device && radeon_device_handle_px_quirks()
152 rdev->pdev->subsystem_vendor == p->subsys_vendor && radeon_device_handle_px_quirks()
153 rdev->pdev->subsystem_device == p->subsys_device) { radeon_device_handle_px_quirks()
154 rdev->px_quirk_flags = p->px_quirk_flags; radeon_device_handle_px_quirks()
160 if (rdev->px_quirk_flags & RADEON_PX_QUIRK_DISABLE_PX) radeon_device_handle_px_quirks()
161 rdev->flags &= ~RADEON_IS_PX; radeon_device_handle_px_quirks()
167 * @rdev: radeon_device pointer
174 void radeon_program_register_sequence(struct radeon_device *rdev, radeon_program_register_sequence() argument
200 void radeon_pci_config_reset(struct radeon_device *rdev) radeon_pci_config_reset() argument
202 pci_write_config_dword(rdev->pdev, 0x7c, RADEON_ASIC_RESET_DATA); radeon_pci_config_reset()
208 * @rdev: radeon_device pointer
212 void radeon_surface_init(struct radeon_device *rdev) radeon_surface_init() argument
215 if (rdev->family < CHIP_R600) { radeon_surface_init()
219 if (rdev->surface_regs[i].bo) radeon_surface_init()
220 radeon_bo_get_surface_reg(rdev->surface_regs[i].bo); radeon_surface_init()
222 radeon_clear_surface_reg(rdev, i); radeon_surface_init()
235 * @rdev: radeon_device pointer
239 void radeon_scratch_init(struct radeon_device *rdev) radeon_scratch_init() argument
244 if (rdev->family < CHIP_R300) { radeon_scratch_init()
245 rdev->scratch.num_reg = 5; radeon_scratch_init()
247 rdev->scratch.num_reg = 7; radeon_scratch_init()
249 rdev->scratch.reg_base = RADEON_SCRATCH_REG0; radeon_scratch_init()
250 for (i = 0; i < rdev->scratch.num_reg; i++) { radeon_scratch_init()
251 rdev->scratch.free[i] = true; radeon_scratch_init()
252 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4); radeon_scratch_init()
259 * @rdev: radeon_device pointer
265 int radeon_scratch_get(struct radeon_device *rdev, uint32_t *reg) radeon_scratch_get() argument
269 for (i = 0; i < rdev->scratch.num_reg; i++) { radeon_scratch_get()
270 if (rdev->scratch.free[i]) { radeon_scratch_get()
271 rdev->scratch.free[i] = false; radeon_scratch_get()
272 *reg = rdev->scratch.reg[i]; radeon_scratch_get()
282 * @rdev: radeon_device pointer
287 void radeon_scratch_free(struct radeon_device *rdev, uint32_t reg) radeon_scratch_free() argument
291 for (i = 0; i < rdev->scratch.num_reg; i++) { radeon_scratch_free()
292 if (rdev->scratch.reg[i] == reg) { radeon_scratch_free()
293 rdev->scratch.free[i] = true; radeon_scratch_free()
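radeon_scratch_get()/radeon_scratch_free() are a tiny fixed-pool allocator: a boolean free[] array plus a table mapping slots to register offsets. Self-contained sketch (the pool size and base offset are illustrative):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define NUM_SCRATCH  7
    #define SCRATCH_BASE 0x15e0 /* illustrative base offset */

    static bool     scratch_free_slot[NUM_SCRATCH];
    static uint32_t scratch_reg[NUM_SCRATCH];

    static void scratch_init(void)
    {
        for (int i = 0; i < NUM_SCRATCH; i++) {
            scratch_free_slot[i] = true;
            scratch_reg[i] = SCRATCH_BASE + i * 4; /* 32-bit regs, 4 bytes apart */
        }
    }

    static int scratch_get(uint32_t *reg)
    {
        for (int i = 0; i < NUM_SCRATCH; i++) {
            if (scratch_free_slot[i]) {
                scratch_free_slot[i] = false;
                *reg = scratch_reg[i];
                return 0;
            }
        }
        return -1; /* pool exhausted */
    }

    static void scratch_put(uint32_t reg)
    {
        for (int i = 0; i < NUM_SCRATCH; i++) {
            if (scratch_reg[i] == reg) {
                scratch_free_slot[i] = true;
                return;
            }
        }
    }

    int main(void)
    {
        uint32_t reg;

        scratch_init();
        if (scratch_get(&reg) == 0)
            printf("got scratch reg 0x%04x\n", (unsigned)reg);
        scratch_put(reg);
        return 0;
    }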
305 * @rdev: radeon_device pointer
310 static int radeon_doorbell_init(struct radeon_device *rdev) radeon_doorbell_init() argument
313 rdev->doorbell.base = pci_resource_start(rdev->pdev, 2); radeon_doorbell_init()
314 rdev->doorbell.size = pci_resource_len(rdev->pdev, 2); radeon_doorbell_init()
316 rdev->doorbell.num_doorbells = min_t(u32, rdev->doorbell.size / sizeof(u32), RADEON_MAX_DOORBELLS); radeon_doorbell_init()
317 if (rdev->doorbell.num_doorbells == 0) radeon_doorbell_init()
320 rdev->doorbell.ptr = ioremap(rdev->doorbell.base, rdev->doorbell.num_doorbells * sizeof(u32)); radeon_doorbell_init()
321 if (rdev->doorbell.ptr == NULL) { radeon_doorbell_init()
324 DRM_INFO("doorbell mmio base: 0x%08X\n", (uint32_t)rdev->doorbell.base); radeon_doorbell_init()
325 DRM_INFO("doorbell mmio size: %u\n", (unsigned)rdev->doorbell.size); radeon_doorbell_init()
327 memset(&rdev->doorbell.used, 0, sizeof(rdev->doorbell.used)); radeon_doorbell_init()
335 * @rdev: radeon_device pointer
339 static void radeon_doorbell_fini(struct radeon_device *rdev) radeon_doorbell_fini() argument
341 iounmap(rdev->doorbell.ptr); radeon_doorbell_fini()
342 rdev->doorbell.ptr = NULL; radeon_doorbell_fini()
348 * @rdev: radeon_device pointer
354 int radeon_doorbell_get(struct radeon_device *rdev, u32 *doorbell) radeon_doorbell_get() argument
356 unsigned long offset = find_first_zero_bit(rdev->doorbell.used, rdev->doorbell.num_doorbells); radeon_doorbell_get()
357 if (offset < rdev->doorbell.num_doorbells) { radeon_doorbell_get()
358 __set_bit(offset, rdev->doorbell.used); radeon_doorbell_get()
369 * @rdev: radeon_device pointer
374 void radeon_doorbell_free(struct radeon_device *rdev, u32 doorbell) radeon_doorbell_free() argument
376 if (doorbell < rdev->doorbell.num_doorbells) radeon_doorbell_free()
377 __clear_bit(doorbell, rdev->doorbell.used); radeon_doorbell_free()
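radeon_doorbell_get()/radeon_doorbell_free() implement a bitmap allocator via find_first_zero_bit(), __set_bit() and __clear_bit(). The same idea in plain C, with the bit operations open-coded:

    #include <stdint.h>
    #include <stdio.h>

    #define MAX_DOORBELLS 1024
    #define BITS_PER_WORD 64

    static uint64_t used[MAX_DOORBELLS / BITS_PER_WORD];

    static int doorbell_get(uint32_t *doorbell)
    {
        for (uint32_t i = 0; i < MAX_DOORBELLS; i++) {
            uint64_t bit = 1ull << (i % BITS_PER_WORD);

            if (!(used[i / BITS_PER_WORD] & bit)) {
                used[i / BITS_PER_WORD] |= bit; /* __set_bit() equivalent */
                *doorbell = i;
                return 0;
            }
        }
        return -1; /* no free doorbell slot */
    }

    static void doorbell_free(uint32_t doorbell)
    {
        if (doorbell < MAX_DOORBELLS)
            used[doorbell / BITS_PER_WORD] &=
                ~(1ull << (doorbell % BITS_PER_WORD));
    }

    int main(void)
    {
        uint32_t db;

        if (doorbell_get(&db) == 0)
            printf("doorbell %u\n", (unsigned)db);
        doorbell_free(db);
        return 0;
    }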
384 * @rdev: radeon_device pointer
393 void radeon_doorbell_get_kfd_info(struct radeon_device *rdev, radeon_doorbell_get_kfd_info() argument
400 if (rdev->doorbell.size > rdev->doorbell.num_doorbells * sizeof(u32)) { radeon_doorbell_get_kfd_info()
401 *aperture_base = rdev->doorbell.base; radeon_doorbell_get_kfd_info()
402 *aperture_size = rdev->doorbell.size; radeon_doorbell_get_kfd_info()
403 *start_offset = rdev->doorbell.num_doorbells * sizeof(u32); radeon_doorbell_get_kfd_info()
421 * @rdev: radeon_device pointer
425 void radeon_wb_disable(struct radeon_device *rdev) radeon_wb_disable() argument
427 rdev->wb.enabled = false; radeon_wb_disable()
433 * @rdev: radeon_device pointer
438 void radeon_wb_fini(struct radeon_device *rdev) radeon_wb_fini() argument
440 radeon_wb_disable(rdev); radeon_wb_fini()
441 if (rdev->wb.wb_obj) { radeon_wb_fini()
442 if (!radeon_bo_reserve(rdev->wb.wb_obj, false)) { radeon_wb_fini()
443 radeon_bo_kunmap(rdev->wb.wb_obj); radeon_wb_fini()
444 radeon_bo_unpin(rdev->wb.wb_obj); radeon_wb_fini()
445 radeon_bo_unreserve(rdev->wb.wb_obj); radeon_wb_fini()
447 radeon_bo_unref(&rdev->wb.wb_obj); radeon_wb_fini()
448 rdev->wb.wb = NULL; radeon_wb_fini()
449 rdev->wb.wb_obj = NULL; radeon_wb_fini()
456 * @rdev: radeon_device pointer
462 int radeon_wb_init(struct radeon_device *rdev) radeon_wb_init() argument
466 if (rdev->wb.wb_obj == NULL) { radeon_wb_init()
467 r = radeon_bo_create(rdev, RADEON_GPU_PAGE_SIZE, PAGE_SIZE, true, radeon_wb_init()
469 &rdev->wb.wb_obj); radeon_wb_init()
471 dev_warn(rdev->dev, "(%d) create WB bo failed\n", r); radeon_wb_init()
474 r = radeon_bo_reserve(rdev->wb.wb_obj, false); radeon_wb_init()
476 radeon_wb_fini(rdev); radeon_wb_init()
479 r = radeon_bo_pin(rdev->wb.wb_obj, RADEON_GEM_DOMAIN_GTT, radeon_wb_init()
480 &rdev->wb.gpu_addr); radeon_wb_init()
482 radeon_bo_unreserve(rdev->wb.wb_obj); radeon_wb_init()
483 dev_warn(rdev->dev, "(%d) pin WB bo failed\n", r); radeon_wb_init()
484 radeon_wb_fini(rdev); radeon_wb_init()
487 r = radeon_bo_kmap(rdev->wb.wb_obj, (void **)&rdev->wb.wb); radeon_wb_init()
488 radeon_bo_unreserve(rdev->wb.wb_obj); radeon_wb_init()
490 dev_warn(rdev->dev, "(%d) map WB bo failed\n", r); radeon_wb_init()
491 radeon_wb_fini(rdev); radeon_wb_init()
497 memset((char *)rdev->wb.wb, 0, RADEON_GPU_PAGE_SIZE); radeon_wb_init()
499 rdev->wb.use_event = false; radeon_wb_init()
502 rdev->wb.enabled = false; radeon_wb_init()
504 if (rdev->flags & RADEON_IS_AGP) { radeon_wb_init()
506 rdev->wb.enabled = false; radeon_wb_init()
507 } else if (rdev->family < CHIP_R300) { radeon_wb_init()
509 rdev->wb.enabled = false; radeon_wb_init()
511 rdev->wb.enabled = true; radeon_wb_init()
513 if (rdev->family >= CHIP_R600) { radeon_wb_init()
514 rdev->wb.use_event = true; radeon_wb_init()
519 if (rdev->family >= CHIP_PALM) { radeon_wb_init()
520 rdev->wb.enabled = true; radeon_wb_init()
521 rdev->wb.use_event = true; radeon_wb_init()
524 dev_info(rdev->dev, "WB %sabled\n", rdev->wb.enabled ? "en" : "dis"); radeon_wb_init()
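The writeback page set up by radeon_wb_init() is a GTT buffer the GPU DMA-writes pointers into; the CPU side reads slots as little-endian 32-bit words, as in the le32_to_cpu(rdev->wb.wb[...]) hit earlier. A hedged sketch of the endian-safe load (the slot offset is this sketch's assumption):

    #include <stdint.h>
    #include <stdio.h>

    #define WB_WPTR_OFFSET 2048   /* byte offset of this sketch's wptr slot */

    /* Portable little-endian 32-bit load, standing in for le32_to_cpu(). */
    static uint32_t le32_load(const uint8_t *p)
    {
        return (uint32_t)p[0] | ((uint32_t)p[1] << 8) |
               ((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
    }

    int main(void)
    {
        uint8_t wb_page[4096] = { 0 };

        /* Pretend the GPU DMA-wrote wptr = 0x120 into its writeback slot. */
        wb_page[WB_WPTR_OFFSET + 0] = 0x20;
        wb_page[WB_WPTR_OFFSET + 1] = 0x01;
        printf("wptr = 0x%x\n", (unsigned)le32_load(wb_page + WB_WPTR_OFFSET));
        return 0;
    }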
531 * @rdev: radeon device structure holding all necessary information radeon_vram_location()
570 void radeon_vram_location(struct radeon_device *rdev, struct radeon_mc *mc, u64 base) radeon_vram_location() argument
575 if (mc->mc_vram_size > (rdev->mc.mc_mask - base + 1)) { radeon_vram_location()
576 dev_warn(rdev->dev, "limiting VRAM to PCI aperture size\n"); radeon_vram_location()
581 if (rdev->flags & RADEON_IS_AGP && mc->vram_end > mc->gtt_start && mc->vram_start <= mc->gtt_end) { radeon_vram_location()
582 dev_warn(rdev->dev, "limiting VRAM to PCI aperture size\n"); radeon_vram_location()
589 dev_info(rdev->dev, "VRAM: %lluM 0x%016llX - 0x%016llX (%lluM used)\n", radeon_vram_location()
596 * @rdev: radeon device structure holding all necessary information radeon_gtt_location()
606 void radeon_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc) radeon_gtt_location() argument
610 size_af = ((rdev->mc.mc_mask - mc->vram_end) + mc->gtt_base_align) & ~mc->gtt_base_align; radeon_gtt_location()
614 dev_warn(rdev->dev, "limiting GTT\n"); radeon_gtt_location()
620 dev_warn(rdev->dev, "limiting GTT\n"); radeon_gtt_location()
626 dev_info(rdev->dev, "GTT: %lluM 0x%016llX - 0x%016llX\n", radeon_gtt_location()
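radeon_gtt_location() measures how much MC address space remains above the end of VRAM, rounding up to the GTT base alignment exactly as the size_af expression above does. The arithmetic in isolation (sample values are made up):

    #include <stdint.h>
    #include <stdio.h>

    /* Space above vram_end up to mc_mask, rounded up to the GTT base alignment
     * (gtt_base_align is alignment-minus-one, as in the driver's expression). */
    static uint64_t size_above_vram(uint64_t mc_mask, uint64_t vram_end,
                                    uint64_t gtt_base_align)
    {
        return ((mc_mask - vram_end) + gtt_base_align) & ~gtt_base_align;
    }

    int main(void)
    {
        uint64_t mc_mask  = 0xffffffffULL;    /* 32-bit MC address space */
        uint64_t vram_end = 256ULL << 20;     /* VRAM ends at 256 MiB */
        uint64_t align    = (4ULL << 20) - 1; /* 4 MiB alignment, as a mask */

        printf("space above VRAM: %llu MiB\n",
               (unsigned long long)(size_above_vram(mc_mask, vram_end, align) >> 20));
        return 0;
    }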
636 * @rdev: radeon_device pointer
642 bool radeon_card_posted(struct radeon_device *rdev) radeon_card_posted() argument
648 (rdev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) && radeon_card_posted()
649 (rdev->family < CHIP_R600)) radeon_card_posted()
652 if (ASIC_IS_NODCE(rdev)) radeon_card_posted()
656 if (ASIC_IS_DCE4(rdev)) { radeon_card_posted()
659 if (rdev->num_crtc >= 4) { radeon_card_posted()
663 if (rdev->num_crtc >= 6) { radeon_card_posted()
669 } else if (ASIC_IS_AVIVO(rdev)) { radeon_card_posted()
685 if (rdev->family >= CHIP_R600) radeon_card_posted()
700 * @rdev: radeon_device pointer
705 void radeon_update_bandwidth_info(struct radeon_device *rdev) radeon_update_bandwidth_info() argument
708 u32 sclk = rdev->pm.current_sclk; radeon_update_bandwidth_info()
709 u32 mclk = rdev->pm.current_mclk; radeon_update_bandwidth_info()
713 rdev->pm.sclk.full = dfixed_const(sclk); radeon_update_bandwidth_info()
714 rdev->pm.sclk.full = dfixed_div(rdev->pm.sclk, a); radeon_update_bandwidth_info()
715 rdev->pm.mclk.full = dfixed_const(mclk); radeon_update_bandwidth_info()
716 rdev->pm.mclk.full = dfixed_div(rdev->pm.mclk, a); radeon_update_bandwidth_info()
718 if (rdev->flags & RADEON_IS_IGP) { radeon_update_bandwidth_info()
721 rdev->pm.core_bandwidth.full = dfixed_div(rdev->pm.sclk, a); radeon_update_bandwidth_info()
728 * @rdev: radeon_device pointer
734 bool radeon_boot_test_post_card(struct radeon_device *rdev) radeon_boot_test_post_card() argument
736 if (radeon_card_posted(rdev)) radeon_boot_test_post_card()
739 if (rdev->bios) { radeon_boot_test_post_card()
741 if (rdev->is_atom_bios) radeon_boot_test_post_card()
742 atom_asic_init(rdev->mode_info.atom_context); radeon_boot_test_post_card()
744 radeon_combios_asic_init(rdev->ddev); radeon_boot_test_post_card()
747 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n"); radeon_boot_test_post_card()
755 * @rdev: radeon_device pointer
762 int radeon_dummy_page_init(struct radeon_device *rdev) radeon_dummy_page_init() argument
764 if (rdev->dummy_page.page) radeon_dummy_page_init()
766 rdev->dummy_page.page = alloc_page(GFP_DMA32 | GFP_KERNEL | __GFP_ZERO); radeon_dummy_page_init()
767 if (rdev->dummy_page.page == NULL) radeon_dummy_page_init()
769 rdev->dummy_page.addr = pci_map_page(rdev->pdev, rdev->dummy_page.page, radeon_dummy_page_init()
771 if (pci_dma_mapping_error(rdev->pdev, rdev->dummy_page.addr)) { radeon_dummy_page_init()
772 dev_err(&rdev->pdev->dev, "Failed to DMA MAP the dummy page\n"); radeon_dummy_page_init()
773 __free_page(rdev->dummy_page.page); radeon_dummy_page_init()
774 rdev->dummy_page.page = NULL; radeon_dummy_page_init()
777 rdev->dummy_page.entry = radeon_gart_get_page_entry(rdev->dummy_page.addr, radeon_dummy_page_init()
785 * @rdev: radeon_device pointer
789 void radeon_dummy_page_fini(struct radeon_device *rdev) radeon_dummy_page_fini() argument
791 if (rdev->dummy_page.page == NULL) radeon_dummy_page_fini()
793 pci_unmap_page(rdev->pdev, rdev->dummy_page.addr, radeon_dummy_page_fini()
795 __free_page(rdev->dummy_page.page); radeon_dummy_page_fini()
796 rdev->dummy_page.page = NULL; radeon_dummy_page_fini()
820 struct radeon_device *rdev = info->dev->dev_private; cail_pll_read() local
823 r = rdev->pll_rreg(rdev, reg); cail_pll_read()
838 struct radeon_device *rdev = info->dev->dev_private; cail_pll_write() local
840 rdev->pll_wreg(rdev, reg, val); cail_pll_write()
854 struct radeon_device *rdev = info->dev->dev_private; cail_mc_read() local
857 r = rdev->mc_rreg(rdev, reg); cail_mc_read()
872 struct radeon_device *rdev = info->dev->dev_private; cail_mc_write() local
874 rdev->mc_wreg(rdev, reg, val); cail_mc_write()
888 struct radeon_device *rdev = info->dev->dev_private; cail_reg_write() local
904 struct radeon_device *rdev = info->dev->dev_private; cail_reg_read() local
922 struct radeon_device *rdev = info->dev->dev_private; cail_ioreg_write() local
938 struct radeon_device *rdev = info->dev->dev_private; cail_ioreg_read() local
948 * @rdev: radeon_device pointer
955 int radeon_atombios_init(struct radeon_device *rdev) radeon_atombios_init() argument
963 rdev->mode_info.atom_card_info = atom_card_info; radeon_atombios_init()
964 atom_card_info->dev = rdev->ddev; radeon_atombios_init()
968 if (rdev->rio_mem) { radeon_atombios_init()
981 rdev->mode_info.atom_context = atom_parse(atom_card_info, rdev->bios); radeon_atombios_init()
982 if (!rdev->mode_info.atom_context) { radeon_atombios_init()
983 radeon_atombios_fini(rdev); radeon_atombios_init()
987 mutex_init(&rdev->mode_info.atom_context->mutex); radeon_atombios_init()
988 mutex_init(&rdev->mode_info.atom_context->scratch_mutex); radeon_atombios_init()
989 radeon_atom_initialize_bios_scratch_regs(rdev->ddev); radeon_atombios_init()
990 atom_allocate_fb_scratch(rdev->mode_info.atom_context); radeon_atombios_init()
997 * @rdev: radeon_device pointer
1003 void radeon_atombios_fini(struct radeon_device *rdev) radeon_atombios_fini() argument
1005 if (rdev->mode_info.atom_context) { radeon_atombios_fini()
1006 kfree(rdev->mode_info.atom_context->scratch); radeon_atombios_fini()
1008 kfree(rdev->mode_info.atom_context); radeon_atombios_fini()
1009 rdev->mode_info.atom_context = NULL; radeon_atombios_fini()
1010 kfree(rdev->mode_info.atom_card_info); radeon_atombios_fini()
1011 rdev->mode_info.atom_card_info = NULL; radeon_atombios_fini()
1024 * @rdev: radeon_device pointer
1030 int radeon_combios_init(struct radeon_device *rdev) radeon_combios_init() argument
1032 radeon_combios_initialize_bios_scratch_regs(rdev->ddev); radeon_combios_init()
1039 * @rdev: radeon_device pointer
1044 void radeon_combios_fini(struct radeon_device *rdev) radeon_combios_fini() argument
1060 struct radeon_device *rdev = cookie; radeon_vga_set_decode() local
1061 radeon_vga_set_state(rdev, state); radeon_vga_set_decode()
1101 * @rdev: radeon_device pointer
1106 static void radeon_check_arguments(struct radeon_device *rdev) radeon_check_arguments() argument
1110 dev_warn(rdev->dev, "vram limit (%d) must be a power of 2\n", radeon_check_arguments()
1116 radeon_gart_size = radeon_gart_size_auto(rdev->family); radeon_check_arguments()
1120 dev_warn(rdev->dev, "gart size (%d) too small\n", radeon_check_arguments()
1122 radeon_gart_size = radeon_gart_size_auto(rdev->family); radeon_check_arguments()
1124 dev_warn(rdev->dev, "gart size (%d) must be a power of 2\n", radeon_check_arguments()
1126 radeon_gart_size = radeon_gart_size_auto(rdev->family); radeon_check_arguments()
1128 rdev->mc.gtt_size = (uint64_t)radeon_gart_size << 20; radeon_check_arguments()
1140 dev_warn(rdev->dev, "invalid AGP mode %d (valid mode: " radeon_check_arguments()
1147 dev_warn(rdev->dev, "VM size (%d) must be a power of 2\n", radeon_check_arguments()
1153 dev_warn(rdev->dev, "VM size (%d) too small, min is 1GB\n", radeon_check_arguments()
1162 dev_warn(rdev->dev, "VM size (%d) too large, max is 1TB\n", radeon_check_arguments()
1183 dev_warn(rdev->dev, "VM page table size (%d) too small\n", radeon_check_arguments()
1190 dev_warn(rdev->dev, "VM page table size (%d) too large\n", radeon_check_arguments()
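radeon_check_arguments() applies the same validate-or-fall-back pattern to each module parameter: range-check, power-of-two check, then a warning and a safe default on violation. A sketch of that pattern (parameter names and defaults here are illustrative):

    #include <stdbool.h>
    #include <stdio.h>

    static bool is_power_of_2(unsigned v)
    {
        return v && !(v & (v - 1));
    }

    /* Accept a MiB-sized parameter only if it is >= min and a power of two. */
    static unsigned check_size_param(const char *name, unsigned val,
                                     unsigned min_mib, unsigned def_mib)
    {
        if (val < min_mib) {
            fprintf(stderr, "%s (%u) too small, using %u\n", name, val, def_mib);
            return def_mib;
        }
        if (!is_power_of_2(val)) {
            fprintf(stderr, "%s (%u) must be a power of 2, using %u\n",
                    name, val, def_mib);
            return def_mib;
        }
        return val;
    }

    int main(void)
    {
        unsigned gart_mib = check_size_param("gart size", 384, 32, 512);

        printf("gart size = %u MiB\n", gart_mib);
        return 0;
    }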
1208 struct radeon_device *rdev = dev->dev_private; radeon_switcheroo_set_state() local
1220 if (d3_delay < 20 && (rdev->px_quirk_flags & RADEON_PX_QUIRK_LONG_WAKEUP)) radeon_switcheroo_set_state()
1268 * @rdev: radeon_device pointer
1277 int radeon_device_init(struct radeon_device *rdev, radeon_device_init() argument
1286 rdev->shutdown = false; radeon_device_init()
1287 rdev->dev = &pdev->dev; radeon_device_init()
1288 rdev->ddev = ddev; radeon_device_init()
1289 rdev->pdev = pdev; radeon_device_init()
1290 rdev->flags = flags; radeon_device_init()
1291 rdev->family = flags & RADEON_FAMILY_MASK; radeon_device_init()
1292 rdev->is_atom_bios = false; radeon_device_init()
1293 rdev->usec_timeout = RADEON_MAX_USEC_TIMEOUT; radeon_device_init()
1294 rdev->mc.gtt_size = 512 * 1024 * 1024; radeon_device_init()
1295 rdev->accel_working = false; radeon_device_init()
1298 rdev->ring[i].idx = i; radeon_device_init()
1300 rdev->fence_context = fence_context_alloc(RADEON_NUM_RINGS); radeon_device_init()
1303 radeon_family_name[rdev->family], pdev->vendor, pdev->device, radeon_device_init()
1308 mutex_init(&rdev->ring_lock); radeon_device_init()
1309 mutex_init(&rdev->dc_hw_i2c_mutex); radeon_device_init()
1310 atomic_set(&rdev->ih.lock, 0); radeon_device_init()
1311 mutex_init(&rdev->gem.mutex); radeon_device_init()
1312 mutex_init(&rdev->pm.mutex); radeon_device_init()
1313 mutex_init(&rdev->gpu_clock_mutex); radeon_device_init()
1314 mutex_init(&rdev->srbm_mutex); radeon_device_init()
1315 mutex_init(&rdev->grbm_idx_mutex); radeon_device_init()
1316 init_rwsem(&rdev->pm.mclk_lock); radeon_device_init()
1317 init_rwsem(&rdev->exclusive_lock); radeon_device_init()
1318 init_waitqueue_head(&rdev->irq.vblank_queue); radeon_device_init()
1319 mutex_init(&rdev->mn_lock); radeon_device_init()
1320 hash_init(rdev->mn_hash); radeon_device_init()
1321 r = radeon_gem_init(rdev); radeon_device_init()
1325 radeon_check_arguments(rdev); radeon_device_init()
1329 rdev->vm_manager.max_pfn = radeon_vm_size << 18; radeon_device_init()
1332 r = radeon_asic_init(rdev); radeon_device_init()
1339 if ((rdev->family >= CHIP_RS400) && radeon_device_init()
1340 (rdev->flags & RADEON_IS_IGP)) { radeon_device_init()
1341 rdev->flags &= ~RADEON_IS_AGP; radeon_device_init()
1344 if (rdev->flags & RADEON_IS_AGP && radeon_agpmode == -1) { radeon_device_init()
1345 radeon_agp_disable(rdev); radeon_device_init()
1352 if (rdev->family >= CHIP_CAYMAN) radeon_device_init()
1353 rdev->mc.mc_mask = 0xffffffffffULL; /* 40 bit MC */ radeon_device_init()
1354 else if (rdev->family >= CHIP_CEDAR) radeon_device_init()
1355 rdev->mc.mc_mask = 0xfffffffffULL; /* 36 bit MC */ radeon_device_init()
1357 rdev->mc.mc_mask = 0xffffffffULL; /* 32 bit MC */ radeon_device_init()
1365 rdev->need_dma32 = false; radeon_device_init()
1366 if (rdev->flags & RADEON_IS_AGP) radeon_device_init()
1367 rdev->need_dma32 = true; radeon_device_init()
1368 if ((rdev->flags & RADEON_IS_PCI) && radeon_device_init()
1369 (rdev->family <= CHIP_RS740)) radeon_device_init()
1370 rdev->need_dma32 = true; radeon_device_init()
1372 dma_bits = rdev->need_dma32 ? 32 : 40; radeon_device_init()
1373 r = pci_set_dma_mask(rdev->pdev, DMA_BIT_MASK(dma_bits)); radeon_device_init()
1375 rdev->need_dma32 = true; radeon_device_init()
1379 r = pci_set_consistent_dma_mask(rdev->pdev, DMA_BIT_MASK(dma_bits)); radeon_device_init()
1381 pci_set_consistent_dma_mask(rdev->pdev, DMA_BIT_MASK(32)); radeon_device_init()
1387 spin_lock_init(&rdev->mmio_idx_lock); radeon_device_init()
1388 spin_lock_init(&rdev->smc_idx_lock); radeon_device_init()
1389 spin_lock_init(&rdev->pll_idx_lock); radeon_device_init()
1390 spin_lock_init(&rdev->mc_idx_lock); radeon_device_init()
1391 spin_lock_init(&rdev->pcie_idx_lock); radeon_device_init()
1392 spin_lock_init(&rdev->pciep_idx_lock); radeon_device_init()
1393 spin_lock_init(&rdev->pif_idx_lock); radeon_device_init()
1394 spin_lock_init(&rdev->cg_idx_lock); radeon_device_init()
1395 spin_lock_init(&rdev->uvd_idx_lock); radeon_device_init()
1396 spin_lock_init(&rdev->rcu_idx_lock); radeon_device_init()
1397 spin_lock_init(&rdev->didt_idx_lock); radeon_device_init()
1398 spin_lock_init(&rdev->end_idx_lock); radeon_device_init()
1399 if (rdev->family >= CHIP_BONAIRE) { radeon_device_init()
1400 rdev->rmmio_base = pci_resource_start(rdev->pdev, 5); radeon_device_init()
1401 rdev->rmmio_size = pci_resource_len(rdev->pdev, 5); radeon_device_init()
1403 rdev->rmmio_base = pci_resource_start(rdev->pdev, 2); radeon_device_init()
1404 rdev->rmmio_size = pci_resource_len(rdev->pdev, 2); radeon_device_init()
1406 rdev->rmmio = ioremap(rdev->rmmio_base, rdev->rmmio_size); radeon_device_init()
1407 if (rdev->rmmio == NULL) { radeon_device_init()
1410 DRM_INFO("register mmio base: 0x%08X\n", (uint32_t)rdev->rmmio_base); radeon_device_init()
1411 DRM_INFO("register mmio size: %u\n", (unsigned)rdev->rmmio_size); radeon_device_init()
1414 if (rdev->family >= CHIP_BONAIRE) radeon_device_init()
1415 radeon_doorbell_init(rdev); radeon_device_init()
1419 if (pci_resource_flags(rdev->pdev, i) & IORESOURCE_IO) { radeon_device_init()
1420 rdev->rio_mem_size = pci_resource_len(rdev->pdev, i); radeon_device_init()
1421 rdev->rio_mem = pci_iomap(rdev->pdev, i, rdev->rio_mem_size); radeon_device_init()
1425 if (rdev->rio_mem == NULL) radeon_device_init()
1428 if (rdev->flags & RADEON_IS_PX) radeon_device_init()
1429 radeon_device_handle_px_quirks(rdev); radeon_device_init()
1434 vga_client_register(rdev->pdev, rdev, NULL, radeon_vga_set_decode); radeon_device_init()
1436 if (rdev->flags & RADEON_IS_PX) radeon_device_init()
1438 vga_switcheroo_register_client(rdev->pdev, &radeon_switcheroo_ops, runtime); radeon_device_init()
1440 vga_switcheroo_init_domain_pm_ops(rdev->dev, &rdev->vga_pm_domain); radeon_device_init()
1442 r = radeon_init(rdev); radeon_device_init()
1446 r = radeon_gem_debugfs_init(rdev); radeon_device_init()
1451 r = radeon_mst_debugfs_init(rdev); radeon_device_init()
1456 if (rdev->flags & RADEON_IS_AGP && !rdev->accel_working) { radeon_device_init()
1460 radeon_asic_reset(rdev); radeon_device_init()
1461 radeon_fini(rdev); radeon_device_init()
1462 radeon_agp_disable(rdev); radeon_device_init()
1463 r = radeon_init(rdev); radeon_device_init()
1468 r = radeon_ib_ring_tests(rdev); radeon_device_init()
1477 if (rdev->pm.dpm_enabled && radeon_device_init()
1478 (rdev->pm.pm_method == PM_METHOD_DPM) && radeon_device_init()
1479 (rdev->family == CHIP_TURKS) && radeon_device_init()
1480 (rdev->flags & RADEON_IS_MOBILITY)) { radeon_device_init()
1481 mutex_lock(&rdev->pm.mutex); radeon_device_init()
1482 radeon_dpm_disable(rdev); radeon_device_init()
1483 radeon_dpm_enable(rdev); radeon_device_init()
1484 mutex_unlock(&rdev->pm.mutex); radeon_device_init()
1488 if (rdev->accel_working) radeon_device_init()
1489 radeon_test_moves(rdev); radeon_device_init()
1494 if (rdev->accel_working) radeon_device_init()
1495 radeon_test_syncing(rdev); radeon_device_init()
1500 if (rdev->accel_working) radeon_device_init()
1501 radeon_benchmark(rdev, radeon_benchmarking); radeon_device_init()
1509 vga_switcheroo_fini_domain_pm_ops(rdev->dev); radeon_device_init()
1513 static void radeon_debugfs_remove_files(struct radeon_device *rdev);
1518 * @rdev: radeon_device pointer
1523 void radeon_device_fini(struct radeon_device *rdev) radeon_device_fini() argument
1526 rdev->shutdown = true; radeon_device_fini()
1528 radeon_bo_evict_vram(rdev); radeon_device_fini()
1529 radeon_fini(rdev); radeon_device_fini()
1530 vga_switcheroo_unregister_client(rdev->pdev); radeon_device_fini()
1531 if (rdev->flags & RADEON_IS_PX) radeon_device_fini()
1532 vga_switcheroo_fini_domain_pm_ops(rdev->dev); radeon_device_fini()
1533 vga_client_register(rdev->pdev, NULL, NULL, NULL); radeon_device_fini()
1534 if (rdev->rio_mem) radeon_device_fini()
1535 pci_iounmap(rdev->pdev, rdev->rio_mem); radeon_device_fini()
1536 rdev->rio_mem = NULL; radeon_device_fini()
1537 iounmap(rdev->rmmio); radeon_device_fini()
1538 rdev->rmmio = NULL; radeon_device_fini()
1539 if (rdev->family >= CHIP_BONAIRE) radeon_device_fini()
1540 radeon_doorbell_fini(rdev); radeon_device_fini()
1541 radeon_debugfs_remove_files(rdev); radeon_device_fini()
1560 struct radeon_device *rdev; radeon_suspend_kms() local
1569 rdev = dev->dev_private; radeon_suspend_kms()
1603 if (!radeon_fbdev_robj_is_fb(rdev, robj)) { radeon_suspend_kms()
1612 radeon_bo_evict_vram(rdev); radeon_suspend_kms()
1616 r = radeon_fence_wait_empty(rdev, i); radeon_suspend_kms()
1619 radeon_fence_driver_force_completion(rdev, i); radeon_suspend_kms()
1623 radeon_save_bios_scratch_regs(rdev); radeon_suspend_kms()
1625 radeon_suspend(rdev); radeon_suspend_kms()
1626 radeon_hpd_fini(rdev); radeon_suspend_kms()
1628 radeon_bo_evict_vram(rdev); radeon_suspend_kms()
1630 radeon_agp_suspend(rdev); radeon_suspend_kms()
1641 radeon_fbdev_set_suspend(rdev, 1); radeon_suspend_kms()
1659 struct radeon_device *rdev = dev->dev_private; radeon_resume_kms() local
1679 radeon_agp_resume(rdev); radeon_resume_kms()
1680 radeon_resume(rdev); radeon_resume_kms()
1682 r = radeon_ib_ring_tests(rdev); radeon_resume_kms()
1686 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { radeon_resume_kms()
1688 r = radeon_pm_late_init(rdev); radeon_resume_kms()
1690 rdev->pm.dpm_enabled = false; radeon_resume_kms()
1695 radeon_pm_resume(rdev); radeon_resume_kms()
1698 radeon_restore_bios_scratch_regs(rdev); radeon_resume_kms()
1711 ASIC_IS_AVIVO(rdev) ? radeon_resume_kms()
1722 if (rdev->is_atom_bios) { radeon_resume_kms()
1723 radeon_atom_encoder_init(rdev); radeon_resume_kms()
1724 radeon_atom_disp_eng_pll_init(rdev); radeon_resume_kms()
1726 if (rdev->mode_info.bl_encoder) { radeon_resume_kms()
1727 u8 bl_level = radeon_get_backlight_level(rdev, radeon_resume_kms()
1728 rdev->mode_info.bl_encoder); radeon_resume_kms()
1729 radeon_set_backlight_level(rdev, rdev->mode_info.bl_encoder, radeon_resume_kms()
1734 radeon_hpd_init(rdev); radeon_resume_kms()
1749 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) radeon_resume_kms()
1750 radeon_pm_compute_clocks(rdev); radeon_resume_kms()
1753 radeon_fbdev_set_suspend(rdev, 0); radeon_resume_kms()
1763 * @rdev: radeon device pointer
1768 int radeon_gpu_reset(struct radeon_device *rdev) radeon_gpu_reset() argument
1778 down_write(&rdev->exclusive_lock); radeon_gpu_reset()
1780 if (!rdev->needs_reset) { radeon_gpu_reset()
1781 up_write(&rdev->exclusive_lock); radeon_gpu_reset()
1785 atomic_inc(&rdev->gpu_reset_counter); radeon_gpu_reset()
1787 radeon_save_bios_scratch_regs(rdev); radeon_gpu_reset()
1789 resched = ttm_bo_lock_delayed_workqueue(&rdev->mman.bdev); radeon_gpu_reset()
1790 radeon_suspend(rdev); radeon_gpu_reset()
1791 radeon_hpd_fini(rdev); radeon_gpu_reset()
1794 ring_sizes[i] = radeon_ring_backup(rdev, &rdev->ring[i], radeon_gpu_reset()
1798 dev_info(rdev->dev, "Saved %d dwords of commands " radeon_gpu_reset()
1803 r = radeon_asic_reset(rdev); radeon_gpu_reset()
1805 dev_info(rdev->dev, "GPU reset succeeded, trying to resume\n"); radeon_gpu_reset()
1806 radeon_resume(rdev); radeon_gpu_reset()
1809 radeon_restore_bios_scratch_regs(rdev); radeon_gpu_reset()
1813 radeon_ring_restore(rdev, &rdev->ring[i], radeon_gpu_reset()
1816 radeon_fence_driver_force_completion(rdev, i); radeon_gpu_reset()
1821 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { radeon_gpu_reset()
1823 r = radeon_pm_late_init(rdev); radeon_gpu_reset()
1825 rdev->pm.dpm_enabled = false; radeon_gpu_reset()
1830 radeon_pm_resume(rdev); radeon_gpu_reset()
1834 if (rdev->is_atom_bios) { radeon_gpu_reset()
1835 radeon_atom_encoder_init(rdev); radeon_gpu_reset()
1836 radeon_atom_disp_eng_pll_init(rdev); radeon_gpu_reset()
1838 if (rdev->mode_info.bl_encoder) { radeon_gpu_reset()
1839 u8 bl_level = radeon_get_backlight_level(rdev, radeon_gpu_reset()
1840 rdev->mode_info.bl_encoder); radeon_gpu_reset()
1841 radeon_set_backlight_level(rdev, rdev->mode_info.bl_encoder, radeon_gpu_reset()
1846 radeon_hpd_init(rdev); radeon_gpu_reset()
1848 ttm_bo_unlock_delayed_workqueue(&rdev->mman.bdev, resched); radeon_gpu_reset()
1850 rdev->in_reset = true; radeon_gpu_reset()
1851 rdev->needs_reset = false; radeon_gpu_reset()
1853 downgrade_write(&rdev->exclusive_lock); radeon_gpu_reset()
1855 drm_helper_resume_force_mode(rdev->ddev); radeon_gpu_reset()
1858 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) radeon_gpu_reset()
1859 radeon_pm_compute_clocks(rdev); radeon_gpu_reset()
1862 r = radeon_ib_ring_tests(rdev); radeon_gpu_reset()
1867 dev_info(rdev->dev, "GPU reset failed\n"); radeon_gpu_reset()
1870 rdev->needs_reset = r == -EAGAIN; radeon_gpu_reset()
1871 rdev->in_reset = false; radeon_gpu_reset()
1873 up_read(&rdev->exclusive_lock); radeon_gpu_reset()
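radeon_gpu_reset() backs up the unexecuted tail of every ring, performs the ASIC reset, and then either replays the saved dwords or, on failure, discards them and force-completes the fences. The backup/reset/restore skeleton as a standalone sketch (everything here is illustrative; no real hardware is touched):

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    #define NUM_RINGS 2

    struct ring { unsigned *data; unsigned ndw; };

    /* Copy a ring's pending dwords aside before the reset wipes state. */
    static unsigned ring_backup(struct ring *r, unsigned **out)
    {
        *out = malloc(r->ndw * sizeof(unsigned));
        memcpy(*out, r->data, r->ndw * sizeof(unsigned));
        return r->ndw;
    }

    static int asic_reset(void) { return 0; /* pretend the reset succeeded */ }

    static void ring_restore(struct ring *r, unsigned *saved, unsigned ndw)
    {
        memcpy(r->data, saved, ndw * sizeof(unsigned));
        free(saved);
    }

    int main(void)
    {
        unsigned buf0[4] = { 1, 2, 3, 4 }, buf1[2] = { 5, 6 };
        struct ring rings[NUM_RINGS] = { { buf0, 4 }, { buf1, 2 } };
        unsigned *saved[NUM_RINGS], sizes[NUM_RINGS];

        for (int i = 0; i < NUM_RINGS; i++)
            sizes[i] = ring_backup(&rings[i], &saved[i]);

        if (asic_reset() == 0) {
            for (int i = 0; i < NUM_RINGS; i++) {
                ring_restore(&rings[i], saved[i], sizes[i]);
                printf("ring %d: restored %u dwords\n", i, sizes[i]);
            }
        } else {
            for (int i = 0; i < NUM_RINGS; i++)
                free(saved[i]); /* drop backups, force-complete fences instead */
        }
        return 0;
    }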
1881 int radeon_debugfs_add_files(struct radeon_device *rdev, radeon_debugfs_add_files() argument
1887 for (i = 0; i < rdev->debugfs_count; i++) { radeon_debugfs_add_files()
1888 if (rdev->debugfs[i].files == files) { radeon_debugfs_add_files()
1894 i = rdev->debugfs_count + 1; radeon_debugfs_add_files()
1901 rdev->debugfs[rdev->debugfs_count].files = files; radeon_debugfs_add_files()
1902 rdev->debugfs[rdev->debugfs_count].num_files = nfiles; radeon_debugfs_add_files()
1903 rdev->debugfs_count = i; radeon_debugfs_add_files()
1906 rdev->ddev->control->debugfs_root, radeon_debugfs_add_files()
1907 rdev->ddev->control); radeon_debugfs_add_files()
1909 rdev->ddev->primary->debugfs_root, radeon_debugfs_add_files()
1910 rdev->ddev->primary); radeon_debugfs_add_files()
1915 static void radeon_debugfs_remove_files(struct radeon_device *rdev) radeon_debugfs_remove_files() argument
1920 for (i = 0; i < rdev->debugfs_count; i++) { radeon_debugfs_remove_files()
1921 drm_debugfs_remove_files(rdev->debugfs[i].files, radeon_debugfs_remove_files()
1922 rdev->debugfs[i].num_files, radeon_debugfs_remove_files()
1923 rdev->ddev->control); radeon_debugfs_remove_files()
1924 drm_debugfs_remove_files(rdev->debugfs[i].files, radeon_debugfs_remove_files()
1925 rdev->debugfs[i].num_files, radeon_debugfs_remove_files()
1926 rdev->ddev->primary); radeon_debugfs_remove_files()
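radeon_debugfs_add_files() keeps a per-device table of registered file arrays and treats re-registration of the same array pointer as a no-op, which is why the loop above compares files pointers first. A minimal sketch of that dedup-on-pointer registration:

    #include <stdio.h>

    #define MAX_COMPONENTS 32

    /* Register a file array once; re-registering the same array is a no-op. */
    struct dbg_entry { const void *files; unsigned num; };
    static struct dbg_entry entries[MAX_COMPONENTS];
    static unsigned entry_count;

    static int debugfs_add_files(const void *files, unsigned num)
    {
        for (unsigned i = 0; i < entry_count; i++)
            if (entries[i].files == files)
                return 0; /* already registered */
        if (entry_count >= MAX_COMPONENTS)
            return -1;
        entries[entry_count].files = files;
        entries[entry_count].num = num;
        entry_count++;
        return 0;
    }

    int main(void)
    {
        static const int fake_files[2];

        debugfs_add_files(fake_files, 2);
        debugfs_add_files(fake_files, 2); /* deduplicated */
        printf("registered %u component(s)\n", entry_count);
        return 0;
    }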
H A Dradeon.h241 bool radeon_get_bios(struct radeon_device *rdev);
251 int radeon_dummy_page_init(struct radeon_device *rdev);
252 void radeon_dummy_page_fini(struct radeon_device *rdev);
277 int radeon_pm_init(struct radeon_device *rdev);
278 int radeon_pm_late_init(struct radeon_device *rdev);
279 void radeon_pm_fini(struct radeon_device *rdev);
280 void radeon_pm_compute_clocks(struct radeon_device *rdev);
281 void radeon_pm_suspend(struct radeon_device *rdev);
282 void radeon_pm_resume(struct radeon_device *rdev);
283 void radeon_combios_get_power_modes(struct radeon_device *rdev);
284 void radeon_atombios_get_power_modes(struct radeon_device *rdev);
285 int radeon_atom_get_clock_dividers(struct radeon_device *rdev,
290 int radeon_atom_get_memory_pll_dividers(struct radeon_device *rdev,
294 void radeon_atom_set_voltage(struct radeon_device *rdev, u16 voltage_level, u8 voltage_type);
295 int radeon_atom_get_voltage_gpio_settings(struct radeon_device *rdev,
298 void radeon_atom_set_engine_dram_timings(struct radeon_device *rdev,
300 int radeon_atom_get_voltage_step(struct radeon_device *rdev,
302 int radeon_atom_get_max_vddc(struct radeon_device *rdev, u8 voltage_type,
304 int radeon_atom_get_leakage_vddc_based_on_leakage_idx(struct radeon_device *rdev,
307 int radeon_atom_get_leakage_id_from_vbios(struct radeon_device *rdev,
309 int radeon_atom_get_leakage_vddc_based_on_leakage_params(struct radeon_device *rdev,
313 int radeon_atom_get_voltage_evv(struct radeon_device *rdev,
316 int radeon_atom_round_to_true_voltage(struct radeon_device *rdev,
320 int radeon_atom_get_min_voltage(struct radeon_device *rdev,
322 int radeon_atom_get_max_voltage(struct radeon_device *rdev,
324 int radeon_atom_get_voltage_table(struct radeon_device *rdev,
327 bool radeon_atom_is_voltage_gpio(struct radeon_device *rdev,
329 int radeon_atom_get_svi2_info(struct radeon_device *rdev,
332 void radeon_atom_update_memory_dll(struct radeon_device *rdev,
334 void radeon_atom_set_ac_timing(struct radeon_device *rdev,
336 int radeon_atom_init_mc_reg_table(struct radeon_device *rdev,
339 int radeon_atom_get_memory_info(struct radeon_device *rdev,
341 int radeon_atom_get_mclk_range_table(struct radeon_device *rdev,
344 int radeon_atom_get_max_vddc(struct radeon_device *rdev, u8 voltage_type,
346 void rs690_pm_info(struct radeon_device *rdev);
355 struct radeon_device *rdev; member in struct:radeon_fence_driver
369 struct radeon_device *rdev; member in struct:radeon_fence
378 int radeon_fence_driver_start_ring(struct radeon_device *rdev, int ring);
379 int radeon_fence_driver_init(struct radeon_device *rdev);
380 void radeon_fence_driver_fini(struct radeon_device *rdev);
381 void radeon_fence_driver_force_completion(struct radeon_device *rdev, int ring);
382 int radeon_fence_emit(struct radeon_device *rdev, struct radeon_fence **fence, int ring);
383 void radeon_fence_process(struct radeon_device *rdev, int ring);
386 int radeon_fence_wait_next(struct radeon_device *rdev, int ring);
387 int radeon_fence_wait_empty(struct radeon_device *rdev, int ring);
388 int radeon_fence_wait_any(struct radeon_device *rdev,
393 unsigned radeon_fence_count_emitted(struct radeon_device *rdev, int ring);
503 struct radeon_device *rdev; member in struct:radeon_bo
514 int radeon_gem_debugfs_init(struct radeon_device *rdev);
572 int radeon_gem_init(struct radeon_device *rdev);
573 void radeon_gem_fini(struct radeon_device *rdev);
574 int radeon_gem_object_create(struct radeon_device *rdev, unsigned long size,
595 int radeon_semaphore_create(struct radeon_device *rdev,
597 bool radeon_semaphore_emit_signal(struct radeon_device *rdev, int ring,
599 bool radeon_semaphore_emit_wait(struct radeon_device *rdev, int ring,
601 void radeon_semaphore_free(struct radeon_device *rdev,
617 int radeon_sync_resv(struct radeon_device *rdev,
621 int radeon_sync_rings(struct radeon_device *rdev,
624 void radeon_sync_free(struct radeon_device *rdev, struct radeon_sync *sync,
655 int radeon_gart_table_ram_alloc(struct radeon_device *rdev);
656 void radeon_gart_table_ram_free(struct radeon_device *rdev);
657 int radeon_gart_table_vram_alloc(struct radeon_device *rdev);
658 void radeon_gart_table_vram_free(struct radeon_device *rdev);
659 int radeon_gart_table_vram_pin(struct radeon_device *rdev);
660 void radeon_gart_table_vram_unpin(struct radeon_device *rdev);
661 int radeon_gart_init(struct radeon_device *rdev);
662 void radeon_gart_fini(struct radeon_device *rdev);
663 void radeon_gart_unbind(struct radeon_device *rdev, unsigned offset,
665 int radeon_gart_bind(struct radeon_device *rdev, unsigned offset,
695 bool radeon_combios_sideport_present(struct radeon_device *rdev);
696 bool radeon_atombios_sideport_present(struct radeon_device *rdev);
708 int radeon_scratch_get(struct radeon_device *rdev, uint32_t *reg);
709 void radeon_scratch_free(struct radeon_device *rdev, uint32_t reg);
725 int radeon_doorbell_get(struct radeon_device *rdev, u32 *page);
726 void radeon_doorbell_free(struct radeon_device *rdev, u32 doorbell);
727 void radeon_doorbell_get_kfd_info(struct radeon_device *rdev,
739 struct radeon_device *rdev; member in struct:radeon_flip_work
819 int radeon_irq_kms_init(struct radeon_device *rdev);
820 void radeon_irq_kms_fini(struct radeon_device *rdev);
821 void radeon_irq_kms_sw_irq_get(struct radeon_device *rdev, int ring);
822 bool radeon_irq_kms_sw_irq_get_delayed(struct radeon_device *rdev, int ring);
823 void radeon_irq_kms_sw_irq_put(struct radeon_device *rdev, int ring);
824 void radeon_irq_kms_pflip_irq_get(struct radeon_device *rdev, int crtc);
825 void radeon_irq_kms_pflip_irq_put(struct radeon_device *rdev, int crtc);
826 void radeon_irq_kms_enable_afmt(struct radeon_device *rdev, int block);
827 void radeon_irq_kms_disable_afmt(struct radeon_device *rdev, int block);
828 void radeon_irq_kms_enable_hpd(struct radeon_device *rdev, unsigned hpd_mask);
829 void radeon_irq_kms_disable_hpd(struct radeon_device *rdev, unsigned hpd_mask);
1019 int radeon_ib_get(struct radeon_device *rdev, int ring,
1022 void radeon_ib_free(struct radeon_device *rdev, struct radeon_ib *ib);
1023 int radeon_ib_schedule(struct radeon_device *rdev, struct radeon_ib *ib,
1025 int radeon_ib_pool_init(struct radeon_device *rdev);
1026 void radeon_ib_pool_fini(struct radeon_device *rdev);
1027 int radeon_ib_ring_tests(struct radeon_device *rdev);
1029 bool radeon_ring_supports_scratch_reg(struct radeon_device *rdev,
1031 void radeon_ring_free_size(struct radeon_device *rdev, struct radeon_ring *cp);
1032 int radeon_ring_alloc(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ndw);
1033 int radeon_ring_lock(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ndw);
1034 void radeon_ring_commit(struct radeon_device *rdev, struct radeon_ring *cp,
1036 void radeon_ring_unlock_commit(struct radeon_device *rdev, struct radeon_ring *cp,
1039 void radeon_ring_unlock_undo(struct radeon_device *rdev, struct radeon_ring *cp);
1040 int radeon_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
1041 void radeon_ring_lockup_update(struct radeon_device *rdev,
1043 bool radeon_ring_test_lockup(struct radeon_device *rdev, struct radeon_ring *ring);
1044 unsigned radeon_ring_backup(struct radeon_device *rdev, struct radeon_ring *ring,
1046 int radeon_ring_restore(struct radeon_device *rdev, struct radeon_ring *ring,
1048 int radeon_ring_init(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ring_size,
1050 void radeon_ring_fini(struct radeon_device *rdev, struct radeon_ring *cp);
1054 void r600_dma_stop(struct radeon_device *rdev);
1055 int r600_dma_resume(struct radeon_device *rdev);
1056 void r600_dma_fini(struct radeon_device *rdev);
1058 void cayman_dma_stop(struct radeon_device *rdev);
1059 int cayman_dma_resume(struct radeon_device *rdev);
1060 void cayman_dma_fini(struct radeon_device *rdev);
1073 struct radeon_device *rdev; member in struct:radeon_cs_parser
1132 int radeon_agp_init(struct radeon_device *rdev);
1133 void radeon_agp_resume(struct radeon_device *rdev);
1134 void radeon_agp_suspend(struct radeon_device *rdev);
1135 void radeon_agp_fini(struct radeon_device *rdev);
1599 void radeon_dpm_enable_uvd(struct radeon_device *rdev, bool enable);
1600 void radeon_dpm_enable_vce(struct radeon_device *rdev, bool enable);
1666 int radeon_pm_get_type_index(struct radeon_device *rdev,
1686 int radeon_uvd_init(struct radeon_device *rdev);
1687 void radeon_uvd_fini(struct radeon_device *rdev);
1688 int radeon_uvd_suspend(struct radeon_device *rdev);
1689 int radeon_uvd_resume(struct radeon_device *rdev);
1690 int radeon_uvd_get_create_msg(struct radeon_device *rdev, int ring,
1692 int radeon_uvd_get_destroy_msg(struct radeon_device *rdev, int ring,
1696 void radeon_uvd_free_handles(struct radeon_device *rdev,
1699 void radeon_uvd_note_usage(struct radeon_device *rdev);
1700 int radeon_uvd_calc_upll_dividers(struct radeon_device *rdev,
1709 int radeon_uvd_send_upll_ctlreq(struct radeon_device *rdev,
1729 int radeon_vce_init(struct radeon_device *rdev);
1730 void radeon_vce_fini(struct radeon_device *rdev);
1731 int radeon_vce_suspend(struct radeon_device *rdev);
1732 int radeon_vce_resume(struct radeon_device *rdev);
1733 int radeon_vce_get_create_msg(struct radeon_device *rdev, int ring,
1735 int radeon_vce_get_destroy_msg(struct radeon_device *rdev, int ring,
1737 void radeon_vce_free_handles(struct radeon_device *rdev, struct drm_file *filp);
1738 void radeon_vce_note_usage(struct radeon_device *rdev);
1741 bool radeon_vce_semaphore_emit(struct radeon_device *rdev,
1745 void radeon_vce_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
1746 void radeon_vce_fence_emit(struct radeon_device *rdev,
1748 int radeon_vce_ring_test(struct radeon_device *rdev, struct radeon_ring *ring);
1749 int radeon_vce_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
1774 void radeon_benchmark(struct radeon_device *rdev, int test_number);
1780 void radeon_test_moves(struct radeon_device *rdev);
1781 void radeon_test_ring_sync(struct radeon_device *rdev,
1784 void radeon_test_syncing(struct radeon_device *rdev);
1808 int radeon_debugfs_add_files(struct radeon_device *rdev,
1811 int radeon_debugfs_fence_init(struct radeon_device *rdev);
1818 u32 (*get_rptr)(struct radeon_device *rdev, struct radeon_ring *ring);
1819 u32 (*get_wptr)(struct radeon_device *rdev, struct radeon_ring *ring);
1820 void (*set_wptr)(struct radeon_device *rdev, struct radeon_ring *ring);
1823 int (*ib_parse)(struct radeon_device *rdev, struct radeon_ib *ib);
1827 void (*ib_execute)(struct radeon_device *rdev, struct radeon_ib *ib);
1828 void (*emit_fence)(struct radeon_device *rdev, struct radeon_fence *fence);
1829 void (*hdp_flush)(struct radeon_device *rdev, struct radeon_ring *ring);
1830 bool (*emit_semaphore)(struct radeon_device *rdev, struct radeon_ring *cp,
1832 void (*vm_flush)(struct radeon_device *rdev, struct radeon_ring *ring,
1836 int (*ring_test)(struct radeon_device *rdev, struct radeon_ring *cp);
1837 int (*ib_test)(struct radeon_device *rdev, struct radeon_ring *cp);
1838 bool (*is_lockup)(struct radeon_device *rdev, struct radeon_ring *cp);
1841 void (*ring_start)(struct radeon_device *rdev, struct radeon_ring *cp);
1848 int (*init)(struct radeon_device *rdev);
1849 void (*fini)(struct radeon_device *rdev);
1850 int (*resume)(struct radeon_device *rdev);
1851 int (*suspend)(struct radeon_device *rdev);
1852 void (*vga_set_state)(struct radeon_device *rdev, bool state);
1853 int (*asic_reset)(struct radeon_device *rdev);
1855 void (*mmio_hdp_flush)(struct radeon_device *rdev);
1857 bool (*gui_idle)(struct radeon_device *rdev);
1859 int (*mc_wait_for_idle)(struct radeon_device *rdev);
1861 u32 (*get_xclk)(struct radeon_device *rdev);
1863 uint64_t (*get_gpu_clock_counter)(struct radeon_device *rdev);
1865 int (*get_allowed_info_register)(struct radeon_device *rdev, u32 reg, u32 *val);
1868 void (*tlb_flush)(struct radeon_device *rdev);
1870 void (*set_page)(struct radeon_device *rdev, unsigned i,
1874 int (*init)(struct radeon_device *rdev);
1875 void (*fini)(struct radeon_device *rdev);
1876 void (*copy_pages)(struct radeon_device *rdev,
1880 void (*write_pages)(struct radeon_device *rdev,
1885 void (*set_pages)(struct radeon_device *rdev,
1896 int (*set)(struct radeon_device *rdev);
1897 int (*process)(struct radeon_device *rdev);
1902 void (*bandwidth_update)(struct radeon_device *rdev);
1904 u32 (*get_vblank_counter)(struct radeon_device *rdev, int crtc);
1906 void (*wait_for_vblank)(struct radeon_device *rdev, int crtc);
1917 struct radeon_fence *(*blit)(struct radeon_device *rdev,
1923 struct radeon_fence *(*dma)(struct radeon_device *rdev,
1930 struct radeon_fence *(*copy)(struct radeon_device *rdev,
1940 int (*set_reg)(struct radeon_device *rdev, int reg,
1943 void (*clear_reg)(struct radeon_device *rdev, int reg);
1947 void (*init)(struct radeon_device *rdev);
1948 void (*fini)(struct radeon_device *rdev);
1949 bool (*sense)(struct radeon_device *rdev, enum radeon_hpd_id hpd);
1950 void (*set_polarity)(struct radeon_device *rdev, enum radeon_hpd_id hpd);
1954 void (*misc)(struct radeon_device *rdev);
1955 void (*prepare)(struct radeon_device *rdev);
1956 void (*finish)(struct radeon_device *rdev);
1957 void (*init_profile)(struct radeon_device *rdev);
1958 void (*get_dynpm_state)(struct radeon_device *rdev);
1959 uint32_t (*get_engine_clock)(struct radeon_device *rdev);
1960 void (*set_engine_clock)(struct radeon_device *rdev, uint32_t eng_clock);
1961 uint32_t (*get_memory_clock)(struct radeon_device *rdev);
1962 void (*set_memory_clock)(struct radeon_device *rdev, uint32_t mem_clock);
1963 int (*get_pcie_lanes)(struct radeon_device *rdev);
1964 void (*set_pcie_lanes)(struct radeon_device *rdev, int lanes);
1965 void (*set_clock_gating)(struct radeon_device *rdev, int enable);
1966 int (*set_uvd_clocks)(struct radeon_device *rdev, u32 vclk, u32 dclk);
1967 int (*set_vce_clocks)(struct radeon_device *rdev, u32 evclk, u32 ecclk);
1968 int (*get_temperature)(struct radeon_device *rdev);
1972 int (*init)(struct radeon_device *rdev);
1973 void (*setup_asic)(struct radeon_device *rdev);
1974 int (*enable)(struct radeon_device *rdev);
1975 int (*late_enable)(struct radeon_device *rdev);
1976 void (*disable)(struct radeon_device *rdev);
1977 int (*pre_set_power_state)(struct radeon_device *rdev);
1978 int (*set_power_state)(struct radeon_device *rdev);
1979 void (*post_set_power_state)(struct radeon_device *rdev);
1980 void (*display_configuration_changed)(struct radeon_device *rdev);
1981 void (*fini)(struct radeon_device *rdev);
1982 u32 (*get_sclk)(struct radeon_device *rdev, bool low);
1983 u32 (*get_mclk)(struct radeon_device *rdev, bool low);
1984 void (*print_power_state)(struct radeon_device *rdev, struct radeon_ps *ps);
1985 void (*debugfs_print_current_performance_level)(struct radeon_device *rdev, struct seq_file *m);
1986 int (*force_performance_level)(struct radeon_device *rdev, enum radeon_dpm_forced_level level);
1987 bool (*vblank_too_short)(struct radeon_device *rdev);
1988 void (*powergate_uvd)(struct radeon_device *rdev, bool gate);
1989 void (*enable_bapm)(struct radeon_device *rdev, bool enable);
1990 void (*fan_ctrl_set_mode)(struct radeon_device *rdev, u32 mode);
1991 u32 (*fan_ctrl_get_mode)(struct radeon_device *rdev);
1992 int (*set_fan_speed_percent)(struct radeon_device *rdev, u32 speed);
1993 int (*get_fan_speed_percent)(struct radeon_device *rdev, u32 *speed);
1994 u32 (*get_current_sclk)(struct radeon_device *rdev);
1995 u32 (*get_current_mclk)(struct radeon_device *rdev);
1999 void (*page_flip)(struct radeon_device *rdev, int crtc, u64 crtc_base);
2000 bool (*page_flip_pending)(struct radeon_device *rdev, int crtc);
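Taken together, the callback members above form the per-family HAL: each ASIC generation fills in one table of function pointers and core code dispatches through rdev->asic. A minimal sketch of the pattern, using the RS600 entry points that appear later in these results (the real struct radeon_asic nests most callbacks into sub-structs such as .ring, .pm, and .dpm):

        static struct radeon_asic rs600_asic_sketch = {
                .init       = &rs600_init,
                .fini       = &rs600_fini,
                .suspend    = &rs600_suspend,
                .resume     = &rs600_resume,
                .asic_reset = &rs600_asic_reset,
                /* ... ring, irq, display, pm, dpm, pflip tables follow ... */
        };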
2211 void radeon_agp_disable(struct radeon_device *rdev);
2212 int radeon_asic_init(struct radeon_device *rdev);
2469 int radeon_device_init(struct radeon_device *rdev,
2473 void radeon_device_fini(struct radeon_device *rdev);
2474 int radeon_gpu_wait_for_idle(struct radeon_device *rdev);
2478 uint32_t r100_mm_rreg_slow(struct radeon_device *rdev, uint32_t reg);
2479 void r100_mm_wreg_slow(struct radeon_device *rdev, uint32_t reg, uint32_t v); r100_mm_rreg()
2480 static inline uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg, r100_mm_rreg() argument
2484 if ((reg < rdev->rmmio_size || reg < RADEON_MIN_MMIO_SIZE) && !always_indirect) r100_mm_rreg()
2485 return readl(((void __iomem *)rdev->rmmio) + reg); r100_mm_rreg()
2487 return r100_mm_rreg_slow(rdev, reg); r100_mm_rreg()
2489 static inline void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v, r100_mm_wreg() argument
2492 if ((reg < rdev->rmmio_size || reg < RADEON_MIN_MMIO_SIZE) && !always_indirect) r100_mm_wreg()
2493 writel(v, ((void __iomem *)rdev->rmmio) + reg); r100_mm_wreg()
2495 r100_mm_wreg_slow(rdev, reg, v); r100_mm_wreg()
2498 u32 r100_io_rreg(struct radeon_device *rdev, u32 reg);
2499 void r100_io_wreg(struct radeon_device *rdev, u32 reg, u32 v);
2501 u32 cik_mm_rdoorbell(struct radeon_device *rdev, u32 index);
2502 void cik_mm_wdoorbell(struct radeon_device *rdev, u32 index, u32 v);
2522 #define RREG8(reg) readb((rdev->rmmio) + (reg))
2523 #define WREG8(reg, v) writeb(v, (rdev->rmmio) + (reg))
2524 #define RREG16(reg) readw((rdev->rmmio) + (reg))
2525 #define WREG16(reg, v) writew(v, (rdev->rmmio) + (reg))
2526 #define RREG32(reg) r100_mm_rreg(rdev, (reg), false)
2527 #define RREG32_IDX(reg) r100_mm_rreg(rdev, (reg), true)
2528 #define DREG32(reg) printk(KERN_INFO "REGISTER: " #reg " : 0x%08X\n", r100_mm_rreg(rdev, (reg), false))
2529 #define WREG32(reg, v) r100_mm_wreg(rdev, (reg), (v), false)
2530 #define WREG32_IDX(reg, v) r100_mm_wreg(rdev, (reg), (v), true)
2533 #define RREG32_PLL(reg) rdev->pll_rreg(rdev, (reg))
2534 #define WREG32_PLL(reg, v) rdev->pll_wreg(rdev, (reg), (v))
2535 #define RREG32_MC(reg) rdev->mc_rreg(rdev, (reg))
2536 #define WREG32_MC(reg, v) rdev->mc_wreg(rdev, (reg), (v))
2537 #define RREG32_PCIE(reg) rv370_pcie_rreg(rdev, (reg))
2538 #define WREG32_PCIE(reg, v) rv370_pcie_wreg(rdev, (reg), (v))
2539 #define RREG32_PCIE_PORT(reg) rdev->pciep_rreg(rdev, (reg))
2540 #define WREG32_PCIE_PORT(reg, v) rdev->pciep_wreg(rdev, (reg), (v))
2541 #define RREG32_SMC(reg) tn_smc_rreg(rdev, (reg))
2542 #define WREG32_SMC(reg, v) tn_smc_wreg(rdev, (reg), (v))
2543 #define RREG32_RCU(reg) r600_rcu_rreg(rdev, (reg))
2544 #define WREG32_RCU(reg, v) r600_rcu_wreg(rdev, (reg), (v))
2545 #define RREG32_CG(reg) eg_cg_rreg(rdev, (reg))
2546 #define WREG32_CG(reg, v) eg_cg_wreg(rdev, (reg), (v))
2547 #define RREG32_PIF_PHY0(reg) eg_pif_phy0_rreg(rdev, (reg))
2548 #define WREG32_PIF_PHY0(reg, v) eg_pif_phy0_wreg(rdev, (reg), (v))
2549 #define RREG32_PIF_PHY1(reg) eg_pif_phy1_rreg(rdev, (reg))
2550 #define WREG32_PIF_PHY1(reg, v) eg_pif_phy1_wreg(rdev, (reg), (v))
2551 #define RREG32_UVD_CTX(reg) r600_uvd_ctx_rreg(rdev, (reg))
2552 #define WREG32_UVD_CTX(reg, v) r600_uvd_ctx_wreg(rdev, (reg), (v))
2553 #define RREG32_DIDT(reg) cik_didt_rreg(rdev, (reg))
2554 #define WREG32_DIDT(reg, v) cik_didt_wreg(rdev, (reg), (v))
2578 #define DREG32_SYS(sqf, rdev, reg) seq_printf((sqf), #reg " : 0x%08X\n", r100_mm_rreg((rdev), (reg), false))
2579 #define RREG32_IO(reg) r100_io_rreg(rdev, (reg))
2580 #define WREG32_IO(reg, v) r100_io_wreg(rdev, (reg), (v))
2582 #define RDOORBELL32(index) cik_mm_rdoorbell(rdev, (index))
2583 #define WDOORBELL32(index, v) cik_mm_wdoorbell(rdev, (index), (v))
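RREG32()/WREG32() expand to the r100_mm_rreg()/r100_mm_wreg() helpers above, which take the fast readl()/writel() path when the register lies inside the mapped BAR and fall back to the *_slow() indirect path otherwise; note that every macro in this block assumes a local named rdev is in scope. A typical read-modify-write, with placeholder register and bit names:

        u32 tmp;

        tmp = RREG32(EXAMPLE_CNTL);        /* placeholder register */
        tmp &= ~EXAMPLE_ENABLE_MASK;       /* placeholder bit mask  */
        tmp |= EXAMPLE_ENABLE;             /* placeholder bit       */
        WREG32(EXAMPLE_CNTL, tmp);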
2593 uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
2594 void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
2595 u32 tn_smc_rreg(struct radeon_device *rdev, u32 reg);
2596 void tn_smc_wreg(struct radeon_device *rdev, u32 reg, u32 v);
2597 u32 r600_rcu_rreg(struct radeon_device *rdev, u32 reg);
2598 void r600_rcu_wreg(struct radeon_device *rdev, u32 reg, u32 v);
2599 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg);
2600 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v);
2601 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg);
2602 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v);
2603 u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg);
2604 void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v);
2605 u32 r600_uvd_ctx_rreg(struct radeon_device *rdev, u32 reg);
2606 void r600_uvd_ctx_wreg(struct radeon_device *rdev, u32 reg, u32 v);
2607 u32 cik_didt_rreg(struct radeon_device *rdev, u32 reg);
2608 void cik_didt_wreg(struct radeon_device *rdev, u32 reg, u32 v);
2610 void r100_pll_errata_after_index(struct radeon_device *rdev);
2616 #define ASIC_IS_RN50(rdev) ((rdev->pdev->device == 0x515e) || \
2617 (rdev->pdev->device == 0x5969))
2618 #define ASIC_IS_RV100(rdev) ((rdev->family == CHIP_RV100) || \
2619 (rdev->family == CHIP_RV200) || \
2620 (rdev->family == CHIP_RS100) || \
2621 (rdev->family == CHIP_RS200) || \
2622 (rdev->family == CHIP_RV250) || \
2623 (rdev->family == CHIP_RV280) || \
2624 (rdev->family == CHIP_RS300))
2625 #define ASIC_IS_R300(rdev) ((rdev->family == CHIP_R300) || \
2626 (rdev->family == CHIP_RV350) || \
2627 (rdev->family == CHIP_R350) || \
2628 (rdev->family == CHIP_RV380) || \
2629 (rdev->family == CHIP_R420) || \
2630 (rdev->family == CHIP_R423) || \
2631 (rdev->family == CHIP_RV410) || \
2632 (rdev->family == CHIP_RS400) || \
2633 (rdev->family == CHIP_RS480))
2634 #define ASIC_IS_X2(rdev) ((rdev->ddev->pdev->device == 0x9441) || \
2635 (rdev->ddev->pdev->device == 0x9443) || \
2636 (rdev->ddev->pdev->device == 0x944B) || \
2637 (rdev->ddev->pdev->device == 0x9506) || \
2638 (rdev->ddev->pdev->device == 0x9509) || \
2639 (rdev->ddev->pdev->device == 0x950F) || \
2640 (rdev->ddev->pdev->device == 0x689C) || \
2641 (rdev->ddev->pdev->device == 0x689D))
2642 #define ASIC_IS_AVIVO(rdev) ((rdev->family >= CHIP_RS600))
2643 #define ASIC_IS_DCE2(rdev) ((rdev->family == CHIP_RS600) || \
2644 (rdev->family == CHIP_RS690) || \
2645 (rdev->family == CHIP_RS740) || \
2646 (rdev->family >= CHIP_R600))
2647 #define ASIC_IS_DCE3(rdev) ((rdev->family >= CHIP_RV620))
2648 #define ASIC_IS_DCE32(rdev) ((rdev->family >= CHIP_RV730))
2649 #define ASIC_IS_DCE4(rdev) ((rdev->family >= CHIP_CEDAR))
2650 #define ASIC_IS_DCE41(rdev) ((rdev->family >= CHIP_PALM) && \
2651 (rdev->flags & RADEON_IS_IGP))
2652 #define ASIC_IS_DCE5(rdev) ((rdev->family >= CHIP_BARTS))
2653 #define ASIC_IS_DCE6(rdev) ((rdev->family >= CHIP_ARUBA))
2654 #define ASIC_IS_DCE61(rdev) ((rdev->family >= CHIP_ARUBA) && \
2655 (rdev->flags & RADEON_IS_IGP))
2656 #define ASIC_IS_DCE64(rdev) ((rdev->family == CHIP_OLAND))
2657 #define ASIC_IS_NODCE(rdev) ((rdev->family == CHIP_HAINAN))
2658 #define ASIC_IS_DCE8(rdev) ((rdev->family >= CHIP_BONAIRE))
2659 #define ASIC_IS_DCE81(rdev) ((rdev->family == CHIP_KAVERI))
2660 #define ASIC_IS_DCE82(rdev) ((rdev->family == CHIP_BONAIRE))
2661 #define ASIC_IS_DCE83(rdev) ((rdev->family == CHIP_KABINI) || \
2662 (rdev->family == CHIP_MULLINS))
2664 #define ASIC_IS_LOMBOK(rdev) ((rdev->ddev->pdev->device == 0x6849) || \
2665 (rdev->ddev->pdev->device == 0x6850) || \
2666 (rdev->ddev->pdev->device == 0x6858) || \
2667 (rdev->ddev->pdev->device == 0x6859) || \
2668 (rdev->ddev->pdev->device == 0x6840) || \
2669 (rdev->ddev->pdev->device == 0x6841) || \
2670 (rdev->ddev->pdev->device == 0x6842) || \
2671 (rdev->ddev->pdev->device == 0x6843))
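The ASIC_IS_*() predicates above are how generation-specific paths are gated throughout the driver. A hypothetical use (both setup helpers are placeholders, not real radeon functions):

        if (ASIC_IS_DCE4(rdev))
                dce4_example_setup(rdev);       /* placeholder */
        else if (ASIC_IS_AVIVO(rdev))
                avivo_example_setup(rdev);      /* placeholder */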
2676 #define RBIOS8(i) (rdev->bios[i])
2680 int radeon_combios_init(struct radeon_device *rdev);
2681 void radeon_combios_fini(struct radeon_device *rdev);
2682 int radeon_atombios_init(struct radeon_device *rdev);
2683 void radeon_atombios_fini(struct radeon_device *rdev);
2712 #define radeon_init(rdev) (rdev)->asic->init((rdev))
2713 #define radeon_fini(rdev) (rdev)->asic->fini((rdev))
2714 #define radeon_resume(rdev) (rdev)->asic->resume((rdev))
2715 #define radeon_suspend(rdev) (rdev)->asic->suspend((rdev))
2716 #define radeon_cs_parse(rdev, r, p) (rdev)->asic->ring[(r)]->cs_parse((p))
2717 #define radeon_vga_set_state(rdev, state) (rdev)->asic->vga_set_state((rdev), (state))
2718 #define radeon_asic_reset(rdev) (rdev)->asic->asic_reset((rdev))
2719 #define radeon_gart_tlb_flush(rdev) (rdev)->asic->gart.tlb_flush((rdev))
2720 #define radeon_gart_get_page_entry(a, f) (rdev)->asic->gart.get_page_entry((a), (f))
2721 #define radeon_gart_set_page(rdev, i, e) (rdev)->asic->gart.set_page((rdev), (i), (e))
2722 #define radeon_asic_vm_init(rdev) (rdev)->asic->vm.init((rdev))
2723 #define radeon_asic_vm_fini(rdev) (rdev)->asic->vm.fini((rdev))
2724 #define radeon_asic_vm_copy_pages(rdev, ib, pe, src, count) ((rdev)->asic->vm.copy_pages((rdev), (ib), (pe), (src), (count)))
2725 #define radeon_asic_vm_write_pages(rdev, ib, pe, addr, count, incr, flags) ((rdev)->asic->vm.write_pages((rdev), (ib), (pe), (addr), (count), (incr), (flags)))
2726 #define radeon_asic_vm_set_pages(rdev, ib, pe, addr, count, incr, flags) ((rdev)->asic->vm.set_pages((rdev), (ib), (pe), (addr), (count), (incr), (flags)))
2727 #define radeon_asic_vm_pad_ib(rdev, ib) ((rdev)->asic->vm.pad_ib((ib)))
2728 #define radeon_ring_start(rdev, r, cp) (rdev)->asic->ring[(r)]->ring_start((rdev), (cp))
2729 #define radeon_ring_test(rdev, r, cp) (rdev)->asic->ring[(r)]->ring_test((rdev), (cp))
2730 #define radeon_ib_test(rdev, r, cp) (rdev)->asic->ring[(r)]->ib_test((rdev), (cp))
2731 #define radeon_ring_ib_execute(rdev, r, ib) (rdev)->asic->ring[(r)]->ib_execute((rdev), (ib))
2732 #define radeon_ring_ib_parse(rdev, r, ib) (rdev)->asic->ring[(r)]->ib_parse((rdev), (ib))
2733 #define radeon_ring_is_lockup(rdev, r, cp) (rdev)->asic->ring[(r)]->is_lockup((rdev), (cp))
2734 #define radeon_ring_vm_flush(rdev, r, vm_id, pd_addr) (rdev)->asic->ring[(r)->idx]->vm_flush((rdev), (r), (vm_id), (pd_addr))
2735 #define radeon_ring_get_rptr(rdev, r) (rdev)->asic->ring[(r)->idx]->get_rptr((rdev), (r))
2736 #define radeon_ring_get_wptr(rdev, r) (rdev)->asic->ring[(r)->idx]->get_wptr((rdev), (r))
2737 #define radeon_ring_set_wptr(rdev, r) (rdev)->asic->ring[(r)->idx]->set_wptr((rdev), (r))
2738 #define radeon_irq_set(rdev) (rdev)->asic->irq.set((rdev))
2739 #define radeon_irq_process(rdev) (rdev)->asic->irq.process((rdev))
2740 #define radeon_get_vblank_counter(rdev, crtc) (rdev)->asic->display.get_vblank_counter((rdev), (crtc))
2741 #define radeon_set_backlight_level(rdev, e, l) (rdev)->asic->display.set_backlight_level((e), (l))
2742 #define radeon_get_backlight_level(rdev, e) (rdev)->asic->display.get_backlight_level((e))
2743 #define radeon_hdmi_enable(rdev, e, b) (rdev)->asic->display.hdmi_enable((e), (b))
2744 #define radeon_hdmi_setmode(rdev, e, m) (rdev)->asic->display.hdmi_setmode((e), (m))
2745 #define radeon_fence_ring_emit(rdev, r, fence) (rdev)->asic->ring[(r)]->emit_fence((rdev), (fence))
2746 #define radeon_semaphore_ring_emit(rdev, r, cp, semaphore, emit_wait) (rdev)->asic->ring[(r)]->emit_semaphore((rdev), (cp), (semaphore), (emit_wait))
2747 #define radeon_copy_blit(rdev, s, d, np, resv) (rdev)->asic->copy.blit((rdev), (s), (d), (np), (resv))
2748 #define radeon_copy_dma(rdev, s, d, np, resv) (rdev)->asic->copy.dma((rdev), (s), (d), (np), (resv))
2749 #define radeon_copy(rdev, s, d, np, resv) (rdev)->asic->copy.copy((rdev), (s), (d), (np), (resv))
2750 #define radeon_copy_blit_ring_index(rdev) (rdev)->asic->copy.blit_ring_index
2751 #define radeon_copy_dma_ring_index(rdev) (rdev)->asic->copy.dma_ring_index
2752 #define radeon_copy_ring_index(rdev) (rdev)->asic->copy.copy_ring_index
2753 #define radeon_get_engine_clock(rdev) (rdev)->asic->pm.get_engine_clock((rdev))
2754 #define radeon_set_engine_clock(rdev, e) (rdev)->asic->pm.set_engine_clock((rdev), (e))
2755 #define radeon_get_memory_clock(rdev) (rdev)->asic->pm.get_memory_clock((rdev))
2756 #define radeon_set_memory_clock(rdev, e) (rdev)->asic->pm.set_memory_clock((rdev), (e))
2757 #define radeon_get_pcie_lanes(rdev) (rdev)->asic->pm.get_pcie_lanes((rdev))
2758 #define radeon_set_pcie_lanes(rdev, l) (rdev)->asic->pm.set_pcie_lanes((rdev), (l))
2759 #define radeon_set_clock_gating(rdev, e) (rdev)->asic->pm.set_clock_gating((rdev), (e))
2760 #define radeon_set_uvd_clocks(rdev, v, d) (rdev)->asic->pm.set_uvd_clocks((rdev), (v), (d))
2761 #define radeon_set_vce_clocks(rdev, ev, ec) (rdev)->asic->pm.set_vce_clocks((rdev), (ev), (ec))
2762 #define radeon_get_temperature(rdev) (rdev)->asic->pm.get_temperature((rdev))
2763 #define radeon_set_surface_reg(rdev, r, f, p, o, s) ((rdev)->asic->surface.set_reg((rdev), (r), (f), (p), (o), (s)))
2764 #define radeon_clear_surface_reg(rdev, r) ((rdev)->asic->surface.clear_reg((rdev), (r)))
2765 #define radeon_bandwidth_update(rdev) (rdev)->asic->display.bandwidth_update((rdev))
2766 #define radeon_hpd_init(rdev) (rdev)->asic->hpd.init((rdev))
2767 #define radeon_hpd_fini(rdev) (rdev)->asic->hpd.fini((rdev))
2768 #define radeon_hpd_sense(rdev, h) (rdev)->asic->hpd.sense((rdev), (h))
2769 #define radeon_hpd_set_polarity(rdev, h) (rdev)->asic->hpd.set_polarity((rdev), (h))
2770 #define radeon_gui_idle(rdev) (rdev)->asic->gui_idle((rdev))
2771 #define radeon_pm_misc(rdev) (rdev)->asic->pm.misc((rdev))
2772 #define radeon_pm_prepare(rdev) (rdev)->asic->pm.prepare((rdev))
2773 #define radeon_pm_finish(rdev) (rdev)->asic->pm.finish((rdev))
2774 #define radeon_pm_init_profile(rdev) (rdev)->asic->pm.init_profile((rdev))
2775 #define radeon_pm_get_dynpm_state(rdev) (rdev)->asic->pm.get_dynpm_state((rdev))
2776 #define radeon_page_flip(rdev, crtc, base) (rdev)->asic->pflip.page_flip((rdev), (crtc), (base))
2777 #define radeon_page_flip_pending(rdev, crtc) (rdev)->asic->pflip.page_flip_pending((rdev), (crtc))
2778 #define radeon_wait_for_vblank(rdev, crtc) (rdev)->asic->display.wait_for_vblank((rdev), (crtc))
2779 #define radeon_mc_wait_for_idle(rdev) (rdev)->asic->mc_wait_for_idle((rdev))
2780 #define radeon_get_xclk(rdev) (rdev)->asic->get_xclk((rdev))
2781 #define radeon_get_gpu_clock_counter(rdev) (rdev)->asic->get_gpu_clock_counter((rdev))
2782 #define radeon_get_allowed_info_register(rdev, r, v) (rdev)->asic->get_allowed_info_register((rdev), (r), (v))
2783 #define radeon_dpm_init(rdev) rdev->asic->dpm.init((rdev))
2784 #define radeon_dpm_setup_asic(rdev) rdev->asic->dpm.setup_asic((rdev))
2785 #define radeon_dpm_enable(rdev) rdev->asic->dpm.enable((rdev))
2786 #define radeon_dpm_late_enable(rdev) rdev->asic->dpm.late_enable((rdev))
2787 #define radeon_dpm_disable(rdev) rdev->asic->dpm.disable((rdev))
2788 #define radeon_dpm_pre_set_power_state(rdev) rdev->asic->dpm.pre_set_power_state((rdev))
2789 #define radeon_dpm_set_power_state(rdev) rdev->asic->dpm.set_power_state((rdev))
2790 #define radeon_dpm_post_set_power_state(rdev) rdev->asic->dpm.post_set_power_state((rdev))
2791 #define radeon_dpm_display_configuration_changed(rdev) rdev->asic->dpm.display_configuration_changed((rdev))
2792 #define radeon_dpm_fini(rdev) rdev->asic->dpm.fini((rdev))
2793 #define radeon_dpm_get_sclk(rdev, l) rdev->asic->dpm.get_sclk((rdev), (l))
2794 #define radeon_dpm_get_mclk(rdev, l) rdev->asic->dpm.get_mclk((rdev), (l))
2795 #define radeon_dpm_print_power_state(rdev, ps) rdev->asic->dpm.print_power_state((rdev), (ps))
2796 #define radeon_dpm_debugfs_print_current_performance_level(rdev, m) rdev->asic->dpm.debugfs_print_current_performance_level((rdev), (m))
2797 #define radeon_dpm_force_performance_level(rdev, l) rdev->asic->dpm.force_performance_level((rdev), (l))
2798 #define radeon_dpm_vblank_too_short(rdev) rdev->asic->dpm.vblank_too_short((rdev))
2799 #define radeon_dpm_powergate_uvd(rdev, g) rdev->asic->dpm.powergate_uvd((rdev), (g))
2800 #define radeon_dpm_enable_bapm(rdev, e) rdev->asic->dpm.enable_bapm((rdev), (e))
2801 #define radeon_dpm_get_current_sclk(rdev) rdev->asic->dpm.get_current_sclk((rdev))
2802 #define radeon_dpm_get_current_mclk(rdev) rdev->asic->dpm.get_current_mclk((rdev))
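Each of these wrappers simply indirects through the rdev->asic table, so call sites stay family-agnostic. For example:

        r = radeon_asic_reset(rdev);
        /* expands to: r = (rdev)->asic->asic_reset((rdev));
         * which on an RS600 board resolves to rs600_asic_reset() below */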
2806 extern int radeon_gpu_reset(struct radeon_device *rdev);
2807 extern void radeon_pci_config_reset(struct radeon_device *rdev);
2808 extern void r600_set_bios_scratch_engine_hung(struct radeon_device *rdev, bool hung);
2809 extern void radeon_agp_disable(struct radeon_device *rdev);
2810 extern int radeon_modeset_init(struct radeon_device *rdev);
2811 extern void radeon_modeset_fini(struct radeon_device *rdev);
2812 extern bool radeon_card_posted(struct radeon_device *rdev);
2813 extern void radeon_update_bandwidth_info(struct radeon_device *rdev);
2814 extern void radeon_update_display_priority(struct radeon_device *rdev);
2815 extern bool radeon_boot_test_post_card(struct radeon_device *rdev);
2816 extern void radeon_scratch_init(struct radeon_device *rdev);
2817 extern void radeon_wb_fini(struct radeon_device *rdev);
2818 extern int radeon_wb_init(struct radeon_device *rdev);
2819 extern void radeon_wb_disable(struct radeon_device *rdev);
2820 extern void radeon_surface_init(struct radeon_device *rdev);
2822 extern void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
2823 extern void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
2830 extern void radeon_vram_location(struct radeon_device *rdev, struct radeon_mc *mc, u64 base);
2831 extern void radeon_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
2834 extern void radeon_ttm_set_active_vram_size(struct radeon_device *rdev, u64 size);
2835 extern void radeon_program_register_sequence(struct radeon_device *rdev,
2842 int radeon_vm_manager_init(struct radeon_device *rdev);
2843 void radeon_vm_manager_fini(struct radeon_device *rdev);
2844 int radeon_vm_init(struct radeon_device *rdev, struct radeon_vm *vm);
2845 void radeon_vm_fini(struct radeon_device *rdev, struct radeon_vm *vm);
2846 struct radeon_bo_list *radeon_vm_get_bos(struct radeon_device *rdev,
2849 struct radeon_fence *radeon_vm_grab_id(struct radeon_device *rdev,
2851 void radeon_vm_flush(struct radeon_device *rdev,
2854 void radeon_vm_fence(struct radeon_device *rdev,
2857 uint64_t radeon_vm_map_gart(struct radeon_device *rdev, uint64_t addr);
2858 int radeon_vm_update_page_directory(struct radeon_device *rdev,
2860 int radeon_vm_clear_freed(struct radeon_device *rdev,
2862 int radeon_vm_clear_invalids(struct radeon_device *rdev,
2864 int radeon_vm_bo_update(struct radeon_device *rdev,
2867 void radeon_vm_bo_invalidate(struct radeon_device *rdev,
2871 struct radeon_bo_va *radeon_vm_bo_add(struct radeon_device *rdev,
2874 int radeon_vm_bo_set_addr(struct radeon_device *rdev,
2878 void radeon_vm_bo_rmv(struct radeon_device *rdev,
2883 struct r600_audio_pin *r600_audio_get_pin(struct radeon_device *rdev);
2884 struct r600_audio_pin *dce6_audio_get_pin(struct radeon_device *rdev);
2885 void r600_audio_enable(struct radeon_device *rdev,
2888 void dce6_audio_enable(struct radeon_device *rdev,
2895 int r600_vram_scratch_init(struct radeon_device *rdev);
2896 void r600_vram_scratch_fini(struct radeon_device *rdev);
2927 extern u32 r6xx_remap_render_backend(struct radeon_device *rdev,
2937 extern int ni_init_microcode(struct radeon_device *rdev);
2938 extern int ni_mc_load_microcode(struct radeon_device *rdev);
2942 extern int radeon_acpi_init(struct radeon_device *rdev);
2943 extern void radeon_acpi_fini(struct radeon_device *rdev);
2944 extern bool radeon_acpi_is_pcie_performance_request_supported(struct radeon_device *rdev);
2945 extern int radeon_acpi_pcie_performance_request(struct radeon_device *rdev,
2947 extern int radeon_acpi_pcie_notify_device_ready(struct radeon_device *rdev);
2949 static inline int radeon_acpi_init(struct radeon_device *rdev) { return 0; } radeon_acpi_fini() argument
2950 static inline void radeon_acpi_fini(struct radeon_device *rdev) { } argument
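The inline stubs above pair with the extern declarations a few lines earlier: presumably guarded by CONFIG_ACPI, they let callers invoke radeon_acpi_init()/radeon_acpi_fini() unconditionally, with the no-ACPI build compiling them away. The general shape (a sketch):

        #ifdef CONFIG_ACPI
        extern int radeon_acpi_init(struct radeon_device *rdev);
        extern void radeon_acpi_fini(struct radeon_device *rdev);
        #else
        static inline int radeon_acpi_init(struct radeon_device *rdev) { return 0; }
        static inline void radeon_acpi_fini(struct radeon_device *rdev) { }
        #endif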
H A Drs600.c47 static void rs600_gpu_init(struct radeon_device *rdev);
48 int rs600_mc_wait_for_idle(struct radeon_device *rdev);
56 static bool avivo_is_in_vblank(struct radeon_device *rdev, int crtc) avivo_is_in_vblank() argument
64 static bool avivo_is_counter_moving(struct radeon_device *rdev, int crtc) avivo_is_counter_moving() argument
80 * @rdev: radeon_device pointer
85 void avivo_wait_for_vblank(struct radeon_device *rdev, int crtc) avivo_wait_for_vblank() argument
89 if (crtc >= rdev->num_crtc) avivo_wait_for_vblank()
98 while (avivo_is_in_vblank(rdev, crtc)) { avivo_wait_for_vblank()
100 if (!avivo_is_counter_moving(rdev, crtc)) avivo_wait_for_vblank()
105 while (!avivo_is_in_vblank(rdev, crtc)) { avivo_wait_for_vblank()
107 if (!avivo_is_counter_moving(rdev, crtc)) avivo_wait_for_vblank()
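The two loops above implement a two-phase wait, sketched here in comment form:

        /* Shape of avivo_wait_for_vblank() (sketch):
         *   while (avivo_is_in_vblank(...))   wait;   -- leave any vblank
         *                                                already in progress
         *   while (!avivo_is_in_vblank(...))  wait;   -- catch the start of
         *                                                the next one
         * The avivo_is_counter_moving() checks break out early when the
         * CRTC is not actually scanning, so the wait cannot hang. */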
113 void rs600_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base) rs600_page_flip() argument
115 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id]; rs600_page_flip()
130 for (i = 0; i < rdev->usec_timeout; i++) { rs600_page_flip()
142 bool rs600_page_flip_pending(struct radeon_device *rdev, int crtc_id) rs600_page_flip_pending() argument
144 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id]; rs600_page_flip_pending()
154 struct radeon_device *rdev = dev->dev_private; avivo_program_fmt() local
215 void rs600_pm_misc(struct radeon_device *rdev) rs600_pm_misc() argument
217 int requested_index = rdev->pm.requested_power_state_index; rs600_pm_misc()
218 struct radeon_power_state *ps = &rdev->pm.power_state[requested_index]; rs600_pm_misc()
244 radeon_atom_set_voltage(rdev, voltage->vddc_id, SET_VOLTAGE_TYPE_ASIC_VDDC); rs600_pm_misc()
298 if ((rdev->flags & RADEON_IS_PCIE) && rs600_pm_misc()
299 !(rdev->flags & RADEON_IS_IGP) && rs600_pm_misc()
300 rdev->asic->pm.set_pcie_lanes && rs600_pm_misc()
302 rdev->pm.power_state[rdev->pm.current_power_state_index].pcie_lanes)) { rs600_pm_misc()
303 radeon_set_pcie_lanes(rdev, rs600_pm_misc()
309 void rs600_pm_prepare(struct radeon_device *rdev) rs600_pm_prepare() argument
311 struct drm_device *ddev = rdev->ddev; rs600_pm_prepare()
327 void rs600_pm_finish(struct radeon_device *rdev) rs600_pm_finish() argument
329 struct drm_device *ddev = rdev->ddev; rs600_pm_finish()
346 bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd) rs600_hpd_sense() argument
368 void rs600_hpd_set_polarity(struct radeon_device *rdev, rs600_hpd_set_polarity() argument
372 bool connected = rs600_hpd_sense(rdev, hpd); rs600_hpd_set_polarity()
396 void rs600_hpd_init(struct radeon_device *rdev) rs600_hpd_init() argument
398 struct drm_device *dev = rdev->ddev; rs600_hpd_init()
417 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd); rs600_hpd_init()
419 radeon_irq_kms_enable_hpd(rdev, enable); rs600_hpd_init()
422 void rs600_hpd_fini(struct radeon_device *rdev) rs600_hpd_fini() argument
424 struct drm_device *dev = rdev->ddev; rs600_hpd_fini()
444 radeon_irq_kms_disable_hpd(rdev, disable); rs600_hpd_fini()
447 int rs600_asic_reset(struct radeon_device *rdev) rs600_asic_reset() argument
458 rv515_mc_stop(rdev, &save); rs600_asic_reset()
460 dev_info(rdev->dev, "(%s:%d) RBBM_STATUS=0x%08X\n", __func__, __LINE__, status); rs600_asic_reset()
468 pci_save_state(rdev->pdev); rs600_asic_reset()
470 pci_clear_master(rdev->pdev); rs600_asic_reset()
480 dev_info(rdev->dev, "(%s:%d) RBBM_STATUS=0x%08X\n", __func__, __LINE__, status); rs600_asic_reset()
488 dev_info(rdev->dev, "(%s:%d) RBBM_STATUS=0x%08X\n", __func__, __LINE__, status); rs600_asic_reset()
496 dev_info(rdev->dev, "(%s:%d) RBBM_STATUS=0x%08X\n", __func__, __LINE__, status); rs600_asic_reset()
498 pci_restore_state(rdev->pdev); rs600_asic_reset()
501 dev_err(rdev->dev, "failed to reset GPU\n"); rs600_asic_reset()
504 dev_info(rdev->dev, "GPU reset succeeded\n"); rs600_asic_reset()
505 rv515_mc_resume(rdev, &save); rs600_asic_reset()
512 void rs600_gart_tlb_flush(struct radeon_device *rdev) rs600_gart_tlb_flush() argument
530 static int rs600_gart_init(struct radeon_device *rdev) rs600_gart_init() argument
534 if (rdev->gart.robj) { rs600_gart_init()
539 r = radeon_gart_init(rdev); rs600_gart_init()
543 rdev->gart.table_size = rdev->gart.num_gpu_pages * 8; rs600_gart_init()
544 return radeon_gart_table_vram_alloc(rdev); rs600_gart_init()
547 static int rs600_gart_enable(struct radeon_device *rdev) rs600_gart_enable() argument
552 if (rdev->gart.robj == NULL) { rs600_gart_enable()
553 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n"); rs600_gart_enable()
556 r = radeon_gart_table_vram_pin(rdev); rs600_gart_enable()
589 rdev->gart.table_addr); rs600_gart_enable()
590 WREG32_MC(R_00013C_MC_PT0_CONTEXT0_FLAT_START_ADDR, rdev->mc.gtt_start); rs600_gart_enable()
591 WREG32_MC(R_00014C_MC_PT0_CONTEXT0_FLAT_END_ADDR, rdev->mc.gtt_end); rs600_gart_enable()
595 WREG32_MC(R_000112_MC_PT0_SYSTEM_APERTURE_LOW_ADDR, rdev->mc.vram_start); rs600_gart_enable()
596 WREG32_MC(R_000114_MC_PT0_SYSTEM_APERTURE_HIGH_ADDR, rdev->mc.vram_end); rs600_gart_enable()
603 rs600_gart_tlb_flush(rdev); rs600_gart_enable()
605 (unsigned)(rdev->mc.gtt_size >> 20), rs600_gart_enable()
606 (unsigned long long)rdev->gart.table_addr); rs600_gart_enable()
607 rdev->gart.ready = true; rs600_gart_enable()
611 static void rs600_gart_disable(struct radeon_device *rdev) rs600_gart_disable() argument
619 radeon_gart_table_vram_unpin(rdev); rs600_gart_disable()
622 static void rs600_gart_fini(struct radeon_device *rdev) rs600_gart_fini() argument
624 radeon_gart_fini(rdev); rs600_gart_fini()
625 rs600_gart_disable(rdev); rs600_gart_fini()
626 radeon_gart_table_vram_free(rdev); rs600_gart_fini()
644 void rs600_gart_set_page(struct radeon_device *rdev, unsigned i, rs600_gart_set_page() argument
647 void __iomem *ptr = (void *)rdev->gart.ptr; rs600_gart_set_page()
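rs600_gart_set_page() stores one 8-byte page-table entry into the table mapped at rdev->gart.ptr; the 8-byte stride matches the table_size computation in rs600_gart_init() above. Roughly, assuming entry is the pre-encoded PTE passed in:

        writeq(entry, ptr + (i * 8));   /* one 64-bit PTE per GART page */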
651 int rs600_irq_set(struct radeon_device *rdev) rs600_irq_set() argument
660 if (ASIC_IS_DCE2(rdev)) rs600_irq_set()
666 if (!rdev->irq.installed) { rs600_irq_set()
671 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) { rs600_irq_set()
674 if (rdev->irq.crtc_vblank_int[0] || rs600_irq_set()
675 atomic_read(&rdev->irq.pflip[0])) { rs600_irq_set()
678 if (rdev->irq.crtc_vblank_int[1] || rs600_irq_set()
679 atomic_read(&rdev->irq.pflip[1])) { rs600_irq_set()
682 if (rdev->irq.hpd[0]) { rs600_irq_set()
685 if (rdev->irq.hpd[1]) { rs600_irq_set()
688 if (rdev->irq.afmt[0]) { rs600_irq_set()
695 if (ASIC_IS_DCE2(rdev)) rs600_irq_set()
704 static inline u32 rs600_irq_ack(struct radeon_device *rdev) rs600_irq_ack() argument
711 rdev->irq.stat_regs.r500.disp_int = RREG32(R_007EDC_DISP_INTERRUPT_STATUS); rs600_irq_ack()
712 if (G_007EDC_LB_D1_VBLANK_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) { rs600_irq_ack()
716 if (G_007EDC_LB_D2_VBLANK_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) { rs600_irq_ack()
720 if (G_007EDC_DC_HOT_PLUG_DETECT1_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) { rs600_irq_ack()
725 if (G_007EDC_DC_HOT_PLUG_DETECT2_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) { rs600_irq_ack()
731 rdev->irq.stat_regs.r500.disp_int = 0; rs600_irq_ack()
734 if (ASIC_IS_DCE2(rdev)) { rs600_irq_ack()
735 rdev->irq.stat_regs.r500.hdmi0_status = RREG32(R_007404_HDMI0_STATUS) & rs600_irq_ack()
737 if (G_007404_HDMI0_AZ_FORMAT_WTRIG(rdev->irq.stat_regs.r500.hdmi0_status)) { rs600_irq_ack()
743 rdev->irq.stat_regs.r500.hdmi0_status = 0; rs600_irq_ack()
751 void rs600_irq_disable(struct radeon_device *rdev) rs600_irq_disable() argument
760 rs600_irq_ack(rdev); rs600_irq_disable()
763 int rs600_irq_process(struct radeon_device *rdev) rs600_irq_process() argument
769 status = rs600_irq_ack(rdev); rs600_irq_process()
771 !rdev->irq.stat_regs.r500.disp_int && rs600_irq_process()
772 !rdev->irq.stat_regs.r500.hdmi0_status) { rs600_irq_process()
776 rdev->irq.stat_regs.r500.disp_int || rs600_irq_process()
777 rdev->irq.stat_regs.r500.hdmi0_status) { rs600_irq_process()
780 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); rs600_irq_process()
783 if (G_007EDC_LB_D1_VBLANK_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) { rs600_irq_process()
784 if (rdev->irq.crtc_vblank_int[0]) { rs600_irq_process()
785 drm_handle_vblank(rdev->ddev, 0); rs600_irq_process()
786 rdev->pm.vblank_sync = true; rs600_irq_process()
787 wake_up(&rdev->irq.vblank_queue); rs600_irq_process()
789 if (atomic_read(&rdev->irq.pflip[0])) rs600_irq_process()
790 radeon_crtc_handle_vblank(rdev, 0); rs600_irq_process()
792 if (G_007EDC_LB_D2_VBLANK_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) { rs600_irq_process()
793 if (rdev->irq.crtc_vblank_int[1]) { rs600_irq_process()
794 drm_handle_vblank(rdev->ddev, 1); rs600_irq_process()
795 rdev->pm.vblank_sync = true; rs600_irq_process()
796 wake_up(&rdev->irq.vblank_queue); rs600_irq_process()
798 if (atomic_read(&rdev->irq.pflip[1])) rs600_irq_process()
799 radeon_crtc_handle_vblank(rdev, 1); rs600_irq_process()
801 if (G_007EDC_DC_HOT_PLUG_DETECT1_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) { rs600_irq_process()
805 if (G_007EDC_DC_HOT_PLUG_DETECT2_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) { rs600_irq_process()
809 if (G_007404_HDMI0_AZ_FORMAT_WTRIG(rdev->irq.stat_regs.r500.hdmi0_status)) { rs600_irq_process()
813 status = rs600_irq_ack(rdev); rs600_irq_process()
816 schedule_delayed_work(&rdev->hotplug_work, 0); rs600_irq_process()
818 schedule_work(&rdev->audio_work); rs600_irq_process()
819 if (rdev->msi_enabled) { rs600_irq_process()
820 switch (rdev->family) { rs600_irq_process()
836 u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc) rs600_get_vblank_counter() argument
844 int rs600_mc_wait_for_idle(struct radeon_device *rdev) rs600_mc_wait_for_idle() argument
848 for (i = 0; i < rdev->usec_timeout; i++) { rs600_mc_wait_for_idle()
856 static void rs600_gpu_init(struct radeon_device *rdev) rs600_gpu_init() argument
858 r420_pipes_init(rdev); rs600_gpu_init()
860 if (rs600_mc_wait_for_idle(rdev)) rs600_gpu_init()
861 dev_warn(rdev->dev, "Timed out waiting for MC idle before updating MC.\n"); rs600_gpu_init()
864 static void rs600_mc_init(struct radeon_device *rdev) rs600_mc_init() argument
868 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0); rs600_mc_init()
869 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0); rs600_mc_init()
870 rdev->mc.vram_is_ddr = true; rs600_mc_init()
871 rdev->mc.vram_width = 128; rs600_mc_init()
872 rdev->mc.real_vram_size = RREG32(RADEON_CONFIG_MEMSIZE); rs600_mc_init()
873 rdev->mc.mc_vram_size = rdev->mc.real_vram_size; rs600_mc_init()
874 rdev->mc.visible_vram_size = rdev->mc.aper_size; rs600_mc_init()
875 rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev); rs600_mc_init()
878 radeon_vram_location(rdev, &rdev->mc, base); rs600_mc_init()
879 rdev->mc.gtt_base_align = 0; rs600_mc_init()
880 radeon_gtt_location(rdev, &rdev->mc); rs600_mc_init()
881 radeon_update_bandwidth_info(rdev); rs600_mc_init()
884 void rs600_bandwidth_update(struct radeon_device *rdev) rs600_bandwidth_update() argument
891 if (!rdev->mode_info.mode_config_initialized) rs600_bandwidth_update()
894 radeon_update_display_priority(rdev); rs600_bandwidth_update()
896 if (rdev->mode_info.crtcs[0]->base.enabled) rs600_bandwidth_update()
897 mode0 = &rdev->mode_info.crtcs[0]->base.mode; rs600_bandwidth_update()
898 if (rdev->mode_info.crtcs[1]->base.enabled) rs600_bandwidth_update()
899 mode1 = &rdev->mode_info.crtcs[1]->base.mode; rs600_bandwidth_update()
901 rs690_line_buffer_adjust(rdev, mode0, mode1); rs600_bandwidth_update()
903 if (rdev->disp_priority == 2) { rs600_bandwidth_update()
915 uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg) rs600_mc_rreg() argument
920 spin_lock_irqsave(&rdev->mc_idx_lock, flags); rs600_mc_rreg()
924 spin_unlock_irqrestore(&rdev->mc_idx_lock, flags); rs600_mc_rreg()
928 void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) rs600_mc_wreg() argument
932 spin_lock_irqsave(&rdev->mc_idx_lock, flags); rs600_mc_wreg()
936 spin_unlock_irqrestore(&rdev->mc_idx_lock, flags); rs600_mc_wreg()
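rs600_mc_rreg()/rs600_mc_wreg() implement the classic index/data pair behind rdev->mc_idx_lock: the register number goes into an index register, the payload moves through a data register, and the spinlock keeps the pair atomic against concurrent accessors. A sketch of the read side, using the RS600 register names (arbitration bits omitted; the write side additionally sets a write-enable bit):

        spin_lock_irqsave(&rdev->mc_idx_lock, flags);
        WREG32(R_000070_MC_IND_INDEX, S_000070_MC_IND_ADDR(reg));
        r = RREG32(R_000074_MC_IND_DATA);
        spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);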
939 static void rs600_debugfs(struct radeon_device *rdev) rs600_debugfs() argument
941 if (r100_debugfs_rbbm_init(rdev)) rs600_debugfs()
945 void rs600_set_safe_registers(struct radeon_device *rdev) rs600_set_safe_registers() argument
947 rdev->config.r300.reg_safe_bm = rs600_reg_safe_bm; rs600_set_safe_registers()
948 rdev->config.r300.reg_safe_bm_size = ARRAY_SIZE(rs600_reg_safe_bm); rs600_set_safe_registers()
951 static void rs600_mc_program(struct radeon_device *rdev) rs600_mc_program() argument
956 rv515_mc_stop(rdev, &save); rs600_mc_program()
959 if (rs600_mc_wait_for_idle(rdev)) rs600_mc_program()
960 dev_warn(rdev->dev, "Timed out waiting for MC idle before updating MC.\n"); rs600_mc_program()
968 S_000004_MC_FB_START(rdev->mc.vram_start >> 16) | rs600_mc_program()
969 S_000004_MC_FB_TOP(rdev->mc.vram_end >> 16)); rs600_mc_program()
971 S_000134_HDP_FB_START(rdev->mc.vram_start >> 16)); rs600_mc_program()
973 rv515_mc_resume(rdev, &save); rs600_mc_program()
976 static int rs600_startup(struct radeon_device *rdev) rs600_startup() argument
980 rs600_mc_program(rdev); rs600_startup()
982 rv515_clock_startup(rdev); rs600_startup()
984 rs600_gpu_init(rdev); rs600_startup()
987 r = rs600_gart_enable(rdev); rs600_startup()
992 r = radeon_wb_init(rdev); rs600_startup()
996 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); rs600_startup()
998 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); rs600_startup()
1003 if (!rdev->irq.installed) { rs600_startup()
1004 r = radeon_irq_kms_init(rdev); rs600_startup()
1009 rs600_irq_set(rdev); rs600_startup()
1010 rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL); rs600_startup()
1012 r = r100_cp_init(rdev, 1024 * 1024); rs600_startup()
1014 dev_err(rdev->dev, "failed initializing CP (%d).\n", r); rs600_startup()
1018 r = radeon_ib_pool_init(rdev); rs600_startup()
1020 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); rs600_startup()
1024 r = radeon_audio_init(rdev); rs600_startup()
1026 dev_err(rdev->dev, "failed initializing audio\n"); rs600_startup()
1033 int rs600_resume(struct radeon_device *rdev) rs600_resume() argument
1038 rs600_gart_disable(rdev); rs600_resume()
1040 rv515_clock_startup(rdev); rs600_resume()
1042 if (radeon_asic_reset(rdev)) { rs600_resume()
1043 dev_warn(rdev->dev, "GPU reset failed! (0xE40=0x%08X, 0x7C0=0x%08X)\n", rs600_resume()
1048 atom_asic_init(rdev->mode_info.atom_context); rs600_resume()
1050 rv515_clock_startup(rdev); rs600_resume()
1052 radeon_surface_init(rdev); rs600_resume()
1054 rdev->accel_working = true; rs600_resume()
1055 r = rs600_startup(rdev); rs600_resume()
1057 rdev->accel_working = false; rs600_resume()
1062 int rs600_suspend(struct radeon_device *rdev) rs600_suspend() argument
1064 radeon_pm_suspend(rdev); rs600_suspend()
1065 radeon_audio_fini(rdev); rs600_suspend()
1066 r100_cp_disable(rdev); rs600_suspend()
1067 radeon_wb_disable(rdev); rs600_suspend()
1068 rs600_irq_disable(rdev); rs600_suspend()
1069 rs600_gart_disable(rdev); rs600_suspend()
1073 void rs600_fini(struct radeon_device *rdev) rs600_fini() argument
1075 radeon_pm_fini(rdev); rs600_fini()
1076 radeon_audio_fini(rdev); rs600_fini()
1077 r100_cp_fini(rdev); rs600_fini()
1078 radeon_wb_fini(rdev); rs600_fini()
1079 radeon_ib_pool_fini(rdev); rs600_fini()
1080 radeon_gem_fini(rdev); rs600_fini()
1081 rs600_gart_fini(rdev); rs600_fini()
1082 radeon_irq_kms_fini(rdev); rs600_fini()
1083 radeon_fence_driver_fini(rdev); rs600_fini()
1084 radeon_bo_fini(rdev); rs600_fini()
1085 radeon_atombios_fini(rdev); rs600_fini()
1086 kfree(rdev->bios); rs600_fini()
1087 rdev->bios = NULL; rs600_fini()
1090 int rs600_init(struct radeon_device *rdev) rs600_init() argument
1095 rv515_vga_render_disable(rdev); rs600_init()
1097 radeon_scratch_init(rdev); rs600_init()
1099 radeon_surface_init(rdev); rs600_init()
1101 r100_restore_sanity(rdev); rs600_init()
1103 if (!radeon_get_bios(rdev)) { rs600_init()
1104 if (ASIC_IS_AVIVO(rdev)) rs600_init()
1107 if (rdev->is_atom_bios) { rs600_init()
1108 r = radeon_atombios_init(rdev); rs600_init()
1112 dev_err(rdev->dev, "Expecting atombios for RS600 GPU\n"); rs600_init()
1116 if (radeon_asic_reset(rdev)) { rs600_init()
1117 dev_warn(rdev->dev, rs600_init()
1123 if (radeon_boot_test_post_card(rdev) == false) rs600_init()
1127 radeon_get_clock_info(rdev->ddev); rs600_init()
1129 rs600_mc_init(rdev); rs600_init()
1130 rs600_debugfs(rdev); rs600_init()
1132 r = radeon_fence_driver_init(rdev); rs600_init()
1136 r = radeon_bo_init(rdev); rs600_init()
1139 r = rs600_gart_init(rdev); rs600_init()
1142 rs600_set_safe_registers(rdev); rs600_init()
1145 radeon_pm_init(rdev); rs600_init()
1147 rdev->accel_working = true; rs600_init()
1148 r = rs600_startup(rdev); rs600_init()
1151 dev_err(rdev->dev, "Disabling GPU acceleration\n"); rs600_init()
1152 r100_cp_fini(rdev); rs600_init()
1153 radeon_wb_fini(rdev); rs600_init()
1154 radeon_ib_pool_fini(rdev); rs600_init()
1155 rs600_gart_fini(rdev); rs600_init()
1156 radeon_irq_kms_fini(rdev); rs600_init()
1157 rdev->accel_working = false; rs600_init()
H A Dsumo_dpm.c82 struct sumo_power_info *sumo_get_pi(struct radeon_device *rdev) sumo_get_pi() argument
84 struct sumo_power_info *pi = rdev->pm.dpm.priv; sumo_get_pi()
89 static void sumo_gfx_clockgating_enable(struct radeon_device *rdev, bool enable) sumo_gfx_clockgating_enable() argument
104 static void sumo_mg_clockgating_enable(struct radeon_device *rdev, bool enable) sumo_mg_clockgating_enable() argument
121 static void sumo_program_git(struct radeon_device *rdev) sumo_program_git() argument
124 u32 xclk = radeon_get_xclk(rdev); sumo_program_git()
132 static void sumo_program_grsd(struct radeon_device *rdev) sumo_program_grsd() argument
135 u32 xclk = radeon_get_xclk(rdev); sumo_program_grsd()
143 void sumo_gfx_clockgating_initialize(struct radeon_device *rdev) sumo_gfx_clockgating_initialize() argument
145 sumo_program_git(rdev); sumo_gfx_clockgating_initialize()
146 sumo_program_grsd(rdev); sumo_gfx_clockgating_initialize()
149 static void sumo_gfx_powergating_initialize(struct radeon_device *rdev) sumo_gfx_powergating_initialize() argument
155 u32 xclk = radeon_get_xclk(rdev); sumo_gfx_powergating_initialize()
157 if (rdev->family == CHIP_PALM) { sumo_gfx_powergating_initialize()
185 if (rdev->family == CHIP_PALM) { sumo_gfx_powergating_initialize()
197 if (rdev->family == CHIP_PALM) { sumo_gfx_powergating_initialize()
218 if (rdev->family == CHIP_PALM) sumo_gfx_powergating_initialize()
221 sumo_smu_pg_init(rdev); sumo_gfx_powergating_initialize()
227 if (rdev->family == CHIP_PALM) { sumo_gfx_powergating_initialize()
233 if (rdev->family == CHIP_PALM) { sumo_gfx_powergating_initialize()
245 sumo_smu_pg_init(rdev); sumo_gfx_powergating_initialize()
252 if (rdev->family == CHIP_PALM) { sumo_gfx_powergating_initialize()
260 if (rdev->family == CHIP_PALM) { sumo_gfx_powergating_initialize()
272 sumo_smu_pg_init(rdev); sumo_gfx_powergating_initialize()
275 static void sumo_gfx_powergating_enable(struct radeon_device *rdev, bool enable) sumo_gfx_powergating_enable() argument
285 static int sumo_enable_clock_power_gating(struct radeon_device *rdev) sumo_enable_clock_power_gating() argument
287 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_enable_clock_power_gating()
290 sumo_gfx_clockgating_initialize(rdev); sumo_enable_clock_power_gating()
292 sumo_gfx_powergating_initialize(rdev); sumo_enable_clock_power_gating()
294 sumo_mg_clockgating_enable(rdev, true); sumo_enable_clock_power_gating()
296 sumo_gfx_clockgating_enable(rdev, true); sumo_enable_clock_power_gating()
298 sumo_gfx_powergating_enable(rdev, true); sumo_enable_clock_power_gating()
303 static void sumo_disable_clock_power_gating(struct radeon_device *rdev) sumo_disable_clock_power_gating() argument
305 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_disable_clock_power_gating()
308 sumo_gfx_clockgating_enable(rdev, false); sumo_disable_clock_power_gating()
310 sumo_gfx_powergating_enable(rdev, false); sumo_disable_clock_power_gating()
312 sumo_mg_clockgating_enable(rdev, false); sumo_disable_clock_power_gating()
315 static void sumo_calculate_bsp(struct radeon_device *rdev, sumo_calculate_bsp() argument
318 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_calculate_bsp()
319 u32 xclk = radeon_get_xclk(rdev); sumo_calculate_bsp()
334 static void sumo_init_bsp(struct radeon_device *rdev) sumo_init_bsp() argument
336 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_init_bsp()
342 static void sumo_program_bsp(struct radeon_device *rdev, sumo_program_bsp() argument
345 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_program_bsp()
353 sumo_calculate_bsp(rdev, highest_engine_clock); sumo_program_bsp()
364 static void sumo_write_at(struct radeon_device *rdev, sumo_write_at() argument
385 static void sumo_program_at(struct radeon_device *rdev, sumo_program_at() argument
388 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_program_at()
416 sumo_write_at(rdev, i, a_t); sumo_program_at()
427 sumo_write_at(rdev, BOOST_DPM_LEVEL, a_t); sumo_program_at()
431 static void sumo_program_tp(struct radeon_device *rdev) sumo_program_tp() argument
453 void sumo_program_vc(struct radeon_device *rdev, u32 vrc) sumo_program_vc() argument
458 void sumo_clear_vc(struct radeon_device *rdev) sumo_clear_vc() argument
463 void sumo_program_sstp(struct radeon_device *rdev) sumo_program_sstp() argument
466 u32 xclk = radeon_get_xclk(rdev); sumo_program_sstp()
474 static void sumo_set_divider_value(struct radeon_device *rdev, sumo_set_divider_value() argument
494 static void sumo_set_ds_dividers(struct radeon_device *rdev, sumo_set_ds_dividers() argument
497 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_set_ds_dividers()
508 static void sumo_set_ss_dividers(struct radeon_device *rdev, sumo_set_ss_dividers() argument
511 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_set_ss_dividers()
522 static void sumo_set_vid(struct radeon_device *rdev, u32 index, u32 vid) sumo_set_vid() argument
531 static void sumo_set_allos_gnb_slow(struct radeon_device *rdev, u32 index, u32 gnb_slow) sumo_set_allos_gnb_slow() argument
533 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_set_allos_gnb_slow()
547 static void sumo_program_power_level(struct radeon_device *rdev, sumo_program_power_level() argument
550 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_program_power_level()
555 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, sumo_program_power_level()
560 sumo_set_divider_value(rdev, index, dividers.post_div); sumo_program_power_level()
562 sumo_set_vid(rdev, index, pl->vddc_index); sumo_program_power_level()
568 sumo_set_ss_dividers(rdev, index, pl->ss_divider_index); sumo_program_power_level()
569 sumo_set_ds_dividers(rdev, index, pl->ds_divider_index); sumo_program_power_level()
575 sumo_set_allos_gnb_slow(rdev, index, pl->allow_gnb_slow); sumo_program_power_level()
578 sumo_set_tdp_limit(rdev, index, pl->sclk_dpm_tdp_limit); sumo_program_power_level()
581 static void sumo_power_level_enable(struct radeon_device *rdev, u32 index, bool enable) sumo_power_level_enable() argument
600 static bool sumo_dpm_enabled(struct radeon_device *rdev) sumo_dpm_enabled() argument
608 static void sumo_start_dpm(struct radeon_device *rdev) sumo_start_dpm() argument
613 static void sumo_stop_dpm(struct radeon_device *rdev) sumo_stop_dpm() argument
618 static void sumo_set_forced_mode(struct radeon_device *rdev, bool enable) sumo_set_forced_mode() argument
626 static void sumo_set_forced_mode_enabled(struct radeon_device *rdev) sumo_set_forced_mode_enabled() argument
630 sumo_set_forced_mode(rdev, true); sumo_set_forced_mode_enabled()
631 for (i = 0; i < rdev->usec_timeout; i++) { sumo_set_forced_mode_enabled()
638 static void sumo_wait_for_level_0(struct radeon_device *rdev) sumo_wait_for_level_0() argument
642 for (i = 0; i < rdev->usec_timeout; i++) { sumo_wait_for_level_0()
647 for (i = 0; i < rdev->usec_timeout; i++) { sumo_wait_for_level_0()
654 static void sumo_set_forced_mode_disabled(struct radeon_device *rdev) sumo_set_forced_mode_disabled() argument
656 sumo_set_forced_mode(rdev, false); sumo_set_forced_mode_disabled()
659 static void sumo_enable_power_level_0(struct radeon_device *rdev) sumo_enable_power_level_0() argument
661 sumo_power_level_enable(rdev, 0, true); sumo_enable_power_level_0()
664 static void sumo_patch_boost_state(struct radeon_device *rdev, sumo_patch_boost_state() argument
667 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_patch_boost_state()
678 static void sumo_pre_notify_alt_vddnb_change(struct radeon_device *rdev, sumo_pre_notify_alt_vddnb_change() argument
693 sumo_smu_notify_alt_vddnb_change(rdev, 0, 0); sumo_pre_notify_alt_vddnb_change()
696 static void sumo_post_notify_alt_vddnb_change(struct radeon_device *rdev, sumo_post_notify_alt_vddnb_change() argument
711 sumo_smu_notify_alt_vddnb_change(rdev, 1, 1); sumo_post_notify_alt_vddnb_change()
714 static void sumo_enable_boost(struct radeon_device *rdev, sumo_enable_boost() argument
722 sumo_boost_state_enable(rdev, true); sumo_enable_boost()
724 sumo_boost_state_enable(rdev, false); sumo_enable_boost()
727 static void sumo_set_forced_level(struct radeon_device *rdev, u32 index) sumo_set_forced_level() argument
732 static void sumo_set_forced_level_0(struct radeon_device *rdev) sumo_set_forced_level_0() argument
734 sumo_set_forced_level(rdev, 0); sumo_set_forced_level_0()
737 static void sumo_program_wl(struct radeon_device *rdev, sumo_program_wl() argument
752 static void sumo_program_power_levels_0_to_n(struct radeon_device *rdev, sumo_program_power_levels_0_to_n() argument
756 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_program_power_levels_0_to_n()
763 sumo_program_power_level(rdev, &new_ps->levels[i], i); sumo_program_power_levels_0_to_n()
764 sumo_power_level_enable(rdev, i, true); sumo_program_power_levels_0_to_n()
768 sumo_power_level_enable(rdev, i, false); sumo_program_power_levels_0_to_n()
771 sumo_program_power_level(rdev, &pi->boost_pl, BOOST_DPM_LEVEL); sumo_program_power_levels_0_to_n()
774 static void sumo_enable_acpi_pm(struct radeon_device *rdev) sumo_enable_acpi_pm() argument
779 static void sumo_program_power_level_enter_state(struct radeon_device *rdev) sumo_program_power_level_enter_state() argument
784 static void sumo_program_acpi_power_level(struct radeon_device *rdev) sumo_program_acpi_power_level() argument
786 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_program_acpi_power_level()
790 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, sumo_program_acpi_power_level()
800 static void sumo_program_bootup_state(struct radeon_device *rdev) sumo_program_bootup_state() argument
802 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_program_bootup_state()
806 sumo_program_power_level(rdev, &pi->boot_pl, 0); sumo_program_bootup_state()
812 sumo_power_level_enable(rdev, i, false); sumo_program_bootup_state()
815 static void sumo_setup_uvd_clocks(struct radeon_device *rdev, sumo_setup_uvd_clocks() argument
819 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_setup_uvd_clocks()
822 sumo_gfx_powergating_enable(rdev, false); sumo_setup_uvd_clocks()
825 radeon_set_uvd_clocks(rdev, new_rps->vclk, new_rps->dclk); sumo_setup_uvd_clocks()
830 sumo_gfx_powergating_enable(rdev, true); sumo_setup_uvd_clocks()
834 static void sumo_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev, sumo_set_uvd_clock_before_set_eng_clock() argument
849 sumo_setup_uvd_clocks(rdev, new_rps, old_rps); sumo_set_uvd_clock_before_set_eng_clock()
852 static void sumo_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev, sumo_set_uvd_clock_after_set_eng_clock() argument
867 sumo_setup_uvd_clocks(rdev, new_rps, old_rps); sumo_set_uvd_clock_after_set_eng_clock()
870 void sumo_take_smu_control(struct radeon_device *rdev, bool enable) sumo_take_smu_control() argument
889 static void sumo_enable_sclk_ds(struct radeon_device *rdev, bool enable) sumo_enable_sclk_ds() argument
910 static void sumo_program_bootup_at(struct radeon_device *rdev) sumo_program_bootup_at() argument
916 static void sumo_reset_am(struct radeon_device *rdev) sumo_reset_am() argument
921 static void sumo_start_am(struct radeon_device *rdev) sumo_start_am() argument
926 static void sumo_program_ttp(struct radeon_device *rdev) sumo_program_ttp() argument
928 u32 xclk = radeon_get_xclk(rdev); sumo_program_ttp()
941 static void sumo_program_ttt(struct radeon_device *rdev) sumo_program_ttt() argument
944 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_program_ttt()
953 static void sumo_enable_voltage_scaling(struct radeon_device *rdev, bool enable) sumo_enable_voltage_scaling() argument
964 static void sumo_override_cnb_thermal_events(struct radeon_device *rdev) sumo_override_cnb_thermal_events() argument
970 static void sumo_program_dc_hto(struct radeon_device *rdev) sumo_program_dc_hto() argument
974 u32 xclk = radeon_get_xclk(rdev); sumo_program_dc_hto()
985 static void sumo_force_nbp_state(struct radeon_device *rdev, sumo_force_nbp_state() argument
988 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_force_nbp_state()
1004 u32 sumo_get_sleep_divider_id_from_clock(struct radeon_device *rdev, sumo_get_sleep_divider_id_from_clock() argument
1008 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_get_sleep_divider_id_from_clock()
1029 static u32 sumo_get_valid_engine_clock(struct radeon_device *rdev, sumo_get_valid_engine_clock() argument
1032 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_get_valid_engine_clock()
1043 static void sumo_patch_thermal_state(struct radeon_device *rdev, sumo_patch_thermal_state() argument
1047 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_patch_thermal_state()
1067 sumo_get_sleep_divider_id_from_clock(rdev, ps->levels[0].sclk, sclk_in_sr); sumo_patch_thermal_state()
1070 sumo_get_sleep_divider_id_from_clock(rdev, ps->levels[0].sclk, SUMO_MINIMUM_ENGINE_CLOCK); sumo_patch_thermal_state()
1087 static void sumo_apply_state_adjust_rules(struct radeon_device *rdev, sumo_apply_state_adjust_rules() argument
1093 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_apply_state_adjust_rules()
1100 return sumo_patch_thermal_state(rdev, ps, current_ps); sumo_apply_state_adjust_rules()
1118 sumo_get_valid_engine_clock(rdev, min_sclk); sumo_apply_state_adjust_rules()
1121 sumo_get_sleep_divider_id_from_clock(rdev, ps->levels[i].sclk, sclk_in_sr); sumo_apply_state_adjust_rules()
1124 sumo_get_sleep_divider_id_from_clock(rdev, ps->levels[i].sclk, SUMO_MINIMUM_ENGINE_CLOCK); sumo_apply_state_adjust_rules()
1152 static void sumo_cleanup_asic(struct radeon_device *rdev) sumo_cleanup_asic() argument
1154 sumo_take_smu_control(rdev, false); sumo_cleanup_asic()
1157 static int sumo_set_thermal_temperature_range(struct radeon_device *rdev, sumo_set_thermal_temperature_range() argument
1175 rdev->pm.dpm.thermal.min_temp = low_temp; sumo_set_thermal_temperature_range()
1176 rdev->pm.dpm.thermal.max_temp = high_temp; sumo_set_thermal_temperature_range()
1181 static void sumo_update_current_ps(struct radeon_device *rdev, sumo_update_current_ps() argument
1185 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_update_current_ps()
1192 static void sumo_update_requested_ps(struct radeon_device *rdev, sumo_update_requested_ps() argument
1196 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_update_requested_ps()
1203 int sumo_dpm_enable(struct radeon_device *rdev) sumo_dpm_enable() argument
1205 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_dpm_enable()
1207 if (sumo_dpm_enabled(rdev)) sumo_dpm_enable()
1210 sumo_program_bootup_state(rdev); sumo_dpm_enable()
1211 sumo_init_bsp(rdev); sumo_dpm_enable()
1212 sumo_reset_am(rdev); sumo_dpm_enable()
1213 sumo_program_tp(rdev); sumo_dpm_enable()
1214 sumo_program_bootup_at(rdev); sumo_dpm_enable()
1215 sumo_start_am(rdev); sumo_dpm_enable()
1217 sumo_program_ttp(rdev); sumo_dpm_enable()
1218 sumo_program_ttt(rdev); sumo_dpm_enable()
1220 sumo_program_dc_hto(rdev); sumo_dpm_enable()
1221 sumo_program_power_level_enter_state(rdev); sumo_dpm_enable()
1222 sumo_enable_voltage_scaling(rdev, true); sumo_dpm_enable()
1223 sumo_program_sstp(rdev); sumo_dpm_enable()
1224 sumo_program_vc(rdev, SUMO_VRC_DFLT); sumo_dpm_enable()
1225 sumo_override_cnb_thermal_events(rdev); sumo_dpm_enable()
1226 sumo_start_dpm(rdev); sumo_dpm_enable()
1227 sumo_wait_for_level_0(rdev); sumo_dpm_enable()
1229 sumo_enable_sclk_ds(rdev, true); sumo_dpm_enable()
1231 sumo_enable_boost_timer(rdev); sumo_dpm_enable()
1233 sumo_update_current_ps(rdev, rdev->pm.dpm.boot_ps); sumo_dpm_enable()
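
The sumo_dpm_enable() hits above trace a strict bring-up order: program the bootup state and arbiter first, start DPM only after every level is configured, wait for the hardware to settle at level 0, then record boot_ps as the current state. A minimal user-space sketch of that ordering follows; struct radeon_device is reduced to a stub and every helper is a hypothetical no-op standing in for the register programming the listing references, not the driver's code.

#include <stdbool.h>
#include <stdio.h>

/* Stub standing in for struct radeon_device; the real one lives in radeon.h. */
struct radeon_device { bool dpm_enabled; };

static bool sumo_dpm_enabled(struct radeon_device *rdev) { return rdev->dpm_enabled; }
static void sumo_program_bootup_state(struct radeon_device *rdev) { (void)rdev; }
static void sumo_start_dpm(struct radeon_device *rdev) { rdev->dpm_enabled = true; }
static void sumo_wait_for_level_0(struct radeon_device *rdev) { (void)rdev; }

/* Mirrors the ordering in sumo_dpm_enable(): configure everything while DPM
 * is still off, start it last, then wait for the hardware to settle at the
 * lowest performance level before declaring the boot state current. */
static int dpm_enable_sketch(struct radeon_device *rdev)
{
	if (sumo_dpm_enabled(rdev))
		return -1;               /* already running; nothing to do */

	sumo_program_bootup_state(rdev); /* program levels before starting */
	sumo_start_dpm(rdev);            /* only now is state switching live */
	sumo_wait_for_level_0(rdev);     /* settle at level 0 */
	return 0;
}

int main(void)
{
	struct radeon_device rdev = { .dpm_enabled = false };
	printf("enable: %d\n", dpm_enable_sketch(&rdev));
	return 0;
}
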
1238 int sumo_dpm_late_enable(struct radeon_device *rdev) sumo_dpm_late_enable() argument
1242 ret = sumo_enable_clock_power_gating(rdev); sumo_dpm_late_enable()
1246 if (rdev->irq.installed && sumo_dpm_late_enable()
1247 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) { sumo_dpm_late_enable()
1248 ret = sumo_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX); sumo_dpm_late_enable()
1251 rdev->irq.dpm_thermal = true; sumo_dpm_late_enable()
1252 radeon_irq_set(rdev); sumo_dpm_late_enable()
1258 void sumo_dpm_disable(struct radeon_device *rdev) sumo_dpm_disable() argument
1260 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_dpm_disable()
1262 if (!sumo_dpm_enabled(rdev)) sumo_dpm_disable()
1264 sumo_disable_clock_power_gating(rdev); sumo_dpm_disable()
1266 sumo_enable_sclk_ds(rdev, false); sumo_dpm_disable()
1267 sumo_clear_vc(rdev); sumo_dpm_disable()
1268 sumo_wait_for_level_0(rdev); sumo_dpm_disable()
1269 sumo_stop_dpm(rdev); sumo_dpm_disable()
1270 sumo_enable_voltage_scaling(rdev, false); sumo_dpm_disable()
1272 if (rdev->irq.installed && sumo_dpm_disable()
1273 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) { sumo_dpm_disable()
1274 rdev->irq.dpm_thermal = false; sumo_dpm_disable()
1275 radeon_irq_set(rdev); sumo_dpm_disable()
1278 sumo_update_current_ps(rdev, rdev->pm.dpm.boot_ps); sumo_dpm_disable()
1281 int sumo_dpm_pre_set_power_state(struct radeon_device *rdev) sumo_dpm_pre_set_power_state() argument
1283 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_dpm_pre_set_power_state()
1284 struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps; sumo_dpm_pre_set_power_state()
1287 sumo_update_requested_ps(rdev, new_ps); sumo_dpm_pre_set_power_state()
1290 sumo_apply_state_adjust_rules(rdev, sumo_dpm_pre_set_power_state()
1297 int sumo_dpm_set_power_state(struct radeon_device *rdev) sumo_dpm_set_power_state() argument
1299 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_dpm_set_power_state()
1304 sumo_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps); sumo_dpm_set_power_state()
1306 sumo_enable_boost(rdev, new_ps, false); sumo_dpm_set_power_state()
1307 sumo_patch_boost_state(rdev, new_ps); sumo_dpm_set_power_state()
1310 sumo_pre_notify_alt_vddnb_change(rdev, new_ps, old_ps); sumo_dpm_set_power_state()
1311 sumo_enable_power_level_0(rdev); sumo_dpm_set_power_state()
1312 sumo_set_forced_level_0(rdev); sumo_dpm_set_power_state()
1313 sumo_set_forced_mode_enabled(rdev); sumo_dpm_set_power_state()
1314 sumo_wait_for_level_0(rdev); sumo_dpm_set_power_state()
1315 sumo_program_power_levels_0_to_n(rdev, new_ps, old_ps); sumo_dpm_set_power_state()
1316 sumo_program_wl(rdev, new_ps); sumo_dpm_set_power_state()
1317 sumo_program_bsp(rdev, new_ps); sumo_dpm_set_power_state()
1318 sumo_program_at(rdev, new_ps); sumo_dpm_set_power_state()
1319 sumo_force_nbp_state(rdev, new_ps); sumo_dpm_set_power_state()
1320 sumo_set_forced_mode_disabled(rdev); sumo_dpm_set_power_state()
1321 sumo_set_forced_mode_enabled(rdev); sumo_dpm_set_power_state()
1322 sumo_set_forced_mode_disabled(rdev); sumo_dpm_set_power_state()
1323 sumo_post_notify_alt_vddnb_change(rdev, new_ps, old_ps); sumo_dpm_set_power_state()
1326 sumo_enable_boost(rdev, new_ps, true); sumo_dpm_set_power_state()
1328 sumo_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps); sumo_dpm_set_power_state()
1333 void sumo_dpm_post_set_power_state(struct radeon_device *rdev) sumo_dpm_post_set_power_state() argument
1335 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_dpm_post_set_power_state()
1338 sumo_update_current_ps(rdev, new_ps); sumo_dpm_post_set_power_state()
1342 void sumo_dpm_reset_asic(struct radeon_device *rdev)
1344 sumo_program_bootup_state(rdev);
1345 sumo_enable_power_level_0(rdev);
1346 sumo_set_forced_level_0(rdev);
1347 sumo_set_forced_mode_enabled(rdev);
1348 sumo_wait_for_level_0(rdev);
1349 sumo_set_forced_mode_disabled(rdev);
1350 sumo_set_forced_mode_enabled(rdev);
1351 sumo_set_forced_mode_disabled(rdev);
1355 void sumo_dpm_setup_asic(struct radeon_device *rdev) sumo_dpm_setup_asic() argument
1357 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_dpm_setup_asic()
1359 sumo_initialize_m3_arb(rdev); sumo_dpm_setup_asic()
1360 pi->fw_version = sumo_get_running_fw_version(rdev); sumo_dpm_setup_asic()
1362 sumo_program_acpi_power_level(rdev); sumo_dpm_setup_asic()
1363 sumo_enable_acpi_pm(rdev); sumo_dpm_setup_asic()
1364 sumo_take_smu_control(rdev, true); sumo_dpm_setup_asic()
1367 void sumo_dpm_display_configuration_changed(struct radeon_device *rdev) sumo_dpm_display_configuration_changed() argument
1393 static void sumo_patch_boot_state(struct radeon_device *rdev, sumo_patch_boot_state() argument
1396 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_patch_boot_state()
1403 static void sumo_parse_pplib_non_clock_info(struct radeon_device *rdev, sumo_parse_pplib_non_clock_info() argument
1423 rdev->pm.dpm.boot_ps = rps; sumo_parse_pplib_non_clock_info()
1424 sumo_patch_boot_state(rdev, ps); sumo_parse_pplib_non_clock_info()
1427 rdev->pm.dpm.uvd_ps = rps; sumo_parse_pplib_non_clock_info()
1430 static void sumo_parse_pplib_clock_info(struct radeon_device *rdev, sumo_parse_pplib_clock_info() argument
1434 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_parse_pplib_clock_info()
1453 static int sumo_parse_power_table(struct radeon_device *rdev) sumo_parse_power_table() argument
1455 struct radeon_mode_info *mode_info = &rdev->mode_info; sumo_parse_power_table()
1485 rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) * sumo_parse_power_table()
1487 if (!rdev->pm.dpm.ps) sumo_parse_power_table()
1496 if (!rdev->pm.power_state[i].clock_info) sumo_parse_power_table()
1500 kfree(rdev->pm.dpm.ps); sumo_parse_power_table()
1503 rdev->pm.dpm.ps[i].ps_priv = ps; sumo_parse_power_table()
1514 sumo_parse_pplib_clock_info(rdev, sumo_parse_power_table()
1515 &rdev->pm.dpm.ps[i], k, sumo_parse_power_table()
1519 sumo_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], sumo_parse_power_table()
1524 rdev->pm.dpm.num_ps = state_array->ucNumEntries; sumo_parse_power_table()
1528 u32 sumo_convert_vid2_to_vid7(struct radeon_device *rdev, sumo_convert_vid2_to_vid7() argument
1543 u32 sumo_convert_vid7_to_vid2(struct radeon_device *rdev,
1558 static u16 sumo_convert_voltage_index_to_value(struct radeon_device *rdev, sumo_convert_voltage_index_to_value() argument
1561 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_convert_voltage_index_to_value()
1562 u32 vid_7bit = sumo_convert_vid2_to_vid7(rdev, &pi->sys_info.vid_mapping_table, vid_2bit); sumo_convert_voltage_index_to_value()
1570 static void sumo_construct_display_voltage_mapping_table(struct radeon_device *rdev, sumo_construct_display_voltage_mapping_table() argument
1592 void sumo_construct_sclk_voltage_mapping_table(struct radeon_device *rdev, sumo_construct_sclk_voltage_mapping_table() argument
1614 void sumo_construct_vid_mapping_table(struct radeon_device *rdev, sumo_construct_vid_mapping_table() argument
1655 static int sumo_parse_sys_info_table(struct radeon_device *rdev) sumo_parse_sys_info_table() argument
1657 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_parse_sys_info_table()
1658 struct radeon_mode_info *mode_info = &rdev->mode_info; sumo_parse_sys_info_table()
1713 sumo_construct_display_voltage_mapping_table(rdev, sumo_parse_sys_info_table()
1716 sumo_construct_sclk_voltage_mapping_table(rdev, sumo_parse_sys_info_table()
1719 sumo_construct_vid_mapping_table(rdev, &pi->sys_info.vid_mapping_table, sumo_parse_sys_info_table()
1726 static void sumo_construct_boot_and_acpi_state(struct radeon_device *rdev) sumo_construct_boot_and_acpi_state() argument
1728 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_construct_boot_and_acpi_state()
1740 int sumo_dpm_init(struct radeon_device *rdev) sumo_dpm_init() argument
1749 rdev->pm.dpm.priv = pi; sumo_dpm_init()
1752 if ((rdev->family == CHIP_PALM) && (hw_rev < 3)) sumo_dpm_init()
1763 if (rdev->family == CHIP_PALM) sumo_dpm_init()
1771 ret = sumo_parse_sys_info_table(rdev); sumo_dpm_init()
1775 sumo_construct_boot_and_acpi_state(rdev); sumo_dpm_init()
1777 ret = r600_get_platform_caps(rdev); sumo_dpm_init()
1781 ret = sumo_parse_power_table(rdev); sumo_dpm_init()
1794 void sumo_dpm_print_power_state(struct radeon_device *rdev, sumo_dpm_print_power_state() argument
1807 sumo_convert_voltage_index_to_value(rdev, pl->vddc_index)); sumo_dpm_print_power_state()
1809 r600_dpm_print_ps_status(rdev, rps); sumo_dpm_print_power_state()
1812 void sumo_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev, sumo_dpm_debugfs_print_current_performance_level() argument
1815 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_dpm_debugfs_print_current_performance_level()
1828 sumo_convert_voltage_index_to_value(rdev, pl->vddc_index)); sumo_dpm_debugfs_print_current_performance_level()
1836 sumo_convert_voltage_index_to_value(rdev, pl->vddc_index)); sumo_dpm_debugfs_print_current_performance_level()
1840 u32 sumo_dpm_get_current_sclk(struct radeon_device *rdev) sumo_dpm_get_current_sclk() argument
1842 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_dpm_get_current_sclk()
1861 u32 sumo_dpm_get_current_mclk(struct radeon_device *rdev) sumo_dpm_get_current_mclk() argument
1863 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_dpm_get_current_mclk()
1868 void sumo_dpm_fini(struct radeon_device *rdev) sumo_dpm_fini() argument
1872 sumo_cleanup_asic(rdev); /* ??? */ sumo_dpm_fini()
1874 for (i = 0; i < rdev->pm.dpm.num_ps; i++) { sumo_dpm_fini()
1875 kfree(rdev->pm.dpm.ps[i].ps_priv); sumo_dpm_fini()
1877 kfree(rdev->pm.dpm.ps); sumo_dpm_fini()
1878 kfree(rdev->pm.dpm.priv); sumo_dpm_fini()
1881 u32 sumo_dpm_get_sclk(struct radeon_device *rdev, bool low) sumo_dpm_get_sclk() argument
1883 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_dpm_get_sclk()
1892 u32 sumo_dpm_get_mclk(struct radeon_device *rdev, bool low) sumo_dpm_get_mclk() argument
1894 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_dpm_get_mclk()
1899 int sumo_dpm_force_performance_level(struct radeon_device *rdev, sumo_dpm_force_performance_level() argument
1902 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_dpm_force_performance_level()
1912 sumo_enable_boost(rdev, rps, false); sumo_dpm_force_performance_level()
1913 sumo_power_level_enable(rdev, ps->num_levels - 1, true); sumo_dpm_force_performance_level()
1914 sumo_set_forced_level(rdev, ps->num_levels - 1); sumo_dpm_force_performance_level()
1915 sumo_set_forced_mode_enabled(rdev); sumo_dpm_force_performance_level()
1917 sumo_power_level_enable(rdev, i, false); sumo_dpm_force_performance_level()
1919 sumo_set_forced_mode(rdev, false); sumo_dpm_force_performance_level()
1920 sumo_set_forced_mode_enabled(rdev); sumo_dpm_force_performance_level()
1921 sumo_set_forced_mode(rdev, false); sumo_dpm_force_performance_level()
1924 sumo_enable_boost(rdev, rps, false); sumo_dpm_force_performance_level()
1925 sumo_power_level_enable(rdev, 0, true); sumo_dpm_force_performance_level()
1926 sumo_set_forced_level(rdev, 0); sumo_dpm_force_performance_level()
1927 sumo_set_forced_mode_enabled(rdev); sumo_dpm_force_performance_level()
1929 sumo_power_level_enable(rdev, i, false); sumo_dpm_force_performance_level()
1931 sumo_set_forced_mode(rdev, false); sumo_dpm_force_performance_level()
1932 sumo_set_forced_mode_enabled(rdev); sumo_dpm_force_performance_level()
1933 sumo_set_forced_mode(rdev, false); sumo_dpm_force_performance_level()
1936 sumo_power_level_enable(rdev, i, true); sumo_dpm_force_performance_level()
1939 sumo_enable_boost(rdev, rps, true); sumo_dpm_force_performance_level()
1942 rdev->pm.dpm.forced_level = level; sumo_dpm_force_performance_level()
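
sumo_dpm_force_performance_level() above pins the state machine by enabling exactly one level and forcing it (the highest level for "high", level 0 for "low"), and re-enables everything for automatic switching otherwise. The sketch below uses simplified, invented types (power_state, level_enable, set_forced_level are stand-ins) and shows only the enable/force bookkeeping, not the boost or forced-mode toggling the listing also performs.

#include <stdbool.h>
#include <stdio.h>

enum dpm_forced_level { DPM_LEVEL_AUTO, DPM_LEVEL_LOW, DPM_LEVEL_HIGH };

struct power_state { int num_levels; bool enabled[8]; int forced; };

/* Hypothetical stand-ins for sumo_power_level_enable()/sumo_set_forced_level(). */
static void level_enable(struct power_state *ps, int i, bool on) { ps->enabled[i] = on; }
static void set_forced_level(struct power_state *ps, int i) { ps->forced = i; }

/* The pattern from sumo_dpm_force_performance_level(): strand the state
 * machine on a single level by enabling only that level and forcing it, or
 * re-enable every level and clear the force for automatic switching. */
static void force_performance_level(struct power_state *ps, enum dpm_forced_level level)
{
	int i;

	if (level == DPM_LEVEL_HIGH) {
		level_enable(ps, ps->num_levels - 1, true);
		set_forced_level(ps, ps->num_levels - 1);
		for (i = 0; i < ps->num_levels - 1; i++)
			level_enable(ps, i, false);  /* strand at the top level */
	} else if (level == DPM_LEVEL_LOW) {
		level_enable(ps, 0, true);
		set_forced_level(ps, 0);
		for (i = 1; i < ps->num_levels; i++)
			level_enable(ps, i, false);  /* strand at level 0 */
	} else {
		for (i = 0; i < ps->num_levels; i++)
			level_enable(ps, i, true);   /* let DPM roam freely */
		ps->forced = -1;
	}
}

int main(void)
{
	struct power_state ps = { .num_levels = 3 };
	force_performance_level(&ps, DPM_LEVEL_HIGH);
	printf("forced level: %d\n", ps.forced);
	return 0;
}
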
H A Dtrinity_dpm.c339 extern void vce_v1_0_enable_mgcg(struct radeon_device *rdev, bool enable);
340 static void trinity_program_clk_gating_hw_sequence(struct radeon_device *rdev,
342 static void trinity_override_dynamic_mg_powergating(struct radeon_device *rdev);
343 static void trinity_apply_state_adjust_rules(struct radeon_device *rdev,
354 static struct trinity_power_info *trinity_get_pi(struct radeon_device *rdev) trinity_get_pi() argument
356 struct trinity_power_info *pi = rdev->pm.dpm.priv; trinity_get_pi()
361 static void trinity_gfx_powergating_initialize(struct radeon_device *rdev) trinity_gfx_powergating_initialize() argument
363 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_gfx_powergating_initialize()
367 u32 xclk = radeon_get_xclk(rdev); trinity_gfx_powergating_initialize()
372 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, trinity_gfx_powergating_initialize()
392 trinity_override_dynamic_mg_powergating(rdev); trinity_gfx_powergating_initialize()
401 static void trinity_mg_clockgating_enable(struct radeon_device *rdev, trinity_mg_clockgating_enable() argument
430 static void trinity_mg_clockgating_initialize(struct radeon_device *rdev) trinity_mg_clockgating_initialize() argument
438 trinity_program_clk_gating_hw_sequence(rdev, seq, count); trinity_mg_clockgating_initialize()
441 static void trinity_gfx_clockgating_enable(struct radeon_device *rdev, trinity_gfx_clockgating_enable() argument
454 static void trinity_program_clk_gating_hw_sequence(struct radeon_device *rdev, trinity_program_clk_gating_hw_sequence() argument
463 static void trinity_program_override_mgpg_sequences(struct radeon_device *rdev, trinity_program_override_mgpg_sequences() argument
473 static void trinity_override_dynamic_mg_powergating(struct radeon_device *rdev) trinity_override_dynamic_mg_powergating() argument
481 trinity_program_override_mgpg_sequences(rdev, seq, count); trinity_override_dynamic_mg_powergating()
484 static void trinity_ls_clockgating_enable(struct radeon_device *rdev, trinity_ls_clockgating_enable() argument
498 trinity_program_clk_gating_hw_sequence(rdev, seq, count); trinity_ls_clockgating_enable()
501 static void trinity_gfx_powergating_enable(struct radeon_device *rdev, trinity_gfx_powergating_enable() argument
515 static void trinity_gfx_dynamic_mgpg_enable(struct radeon_device *rdev, trinity_gfx_dynamic_mgpg_enable() argument
540 trinity_gfx_dynamic_mgpg_config(rdev); trinity_gfx_dynamic_mgpg_enable()
544 static void trinity_enable_clock_power_gating(struct radeon_device *rdev) trinity_enable_clock_power_gating() argument
546 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_enable_clock_power_gating()
549 sumo_gfx_clockgating_initialize(rdev); trinity_enable_clock_power_gating()
551 trinity_mg_clockgating_initialize(rdev); trinity_enable_clock_power_gating()
553 trinity_gfx_powergating_initialize(rdev); trinity_enable_clock_power_gating()
555 trinity_ls_clockgating_enable(rdev, true); trinity_enable_clock_power_gating()
556 trinity_mg_clockgating_enable(rdev, true); trinity_enable_clock_power_gating()
559 trinity_gfx_clockgating_enable(rdev, true); trinity_enable_clock_power_gating()
561 trinity_gfx_dynamic_mgpg_enable(rdev, true); trinity_enable_clock_power_gating()
563 trinity_gfx_powergating_enable(rdev, true); trinity_enable_clock_power_gating()
566 static void trinity_disable_clock_power_gating(struct radeon_device *rdev) trinity_disable_clock_power_gating() argument
568 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_disable_clock_power_gating()
571 trinity_gfx_powergating_enable(rdev, false); trinity_disable_clock_power_gating()
573 trinity_gfx_dynamic_mgpg_enable(rdev, false); trinity_disable_clock_power_gating()
575 trinity_gfx_clockgating_enable(rdev, false); trinity_disable_clock_power_gating()
577 trinity_mg_clockgating_enable(rdev, false); trinity_disable_clock_power_gating()
578 trinity_ls_clockgating_enable(rdev, false); trinity_disable_clock_power_gating()
582 static void trinity_set_divider_value(struct radeon_device *rdev, trinity_set_divider_value() argument
590 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, trinity_set_divider_value()
600 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, trinity_set_divider_value()
611 static void trinity_set_ds_dividers(struct radeon_device *rdev, trinity_set_ds_dividers() argument
623 static void trinity_set_ss_dividers(struct radeon_device *rdev, trinity_set_ss_dividers() argument
635 static void trinity_set_vid(struct radeon_device *rdev, u32 index, u32 vid) trinity_set_vid() argument
637 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_set_vid()
638 u32 vid_7bit = sumo_convert_vid2_to_vid7(rdev, &pi->sys_info.vid_mapping_table, vid); trinity_set_vid()
653 static void trinity_set_allos_gnb_slow(struct radeon_device *rdev, trinity_set_allos_gnb_slow() argument
665 static void trinity_set_force_nbp_state(struct radeon_device *rdev, trinity_set_force_nbp_state() argument
677 static void trinity_set_display_wm(struct radeon_device *rdev, trinity_set_display_wm() argument
689 static void trinity_set_vce_wm(struct radeon_device *rdev, trinity_set_vce_wm() argument
701 static void trinity_set_at(struct radeon_device *rdev, trinity_set_at() argument
713 static void trinity_program_power_level(struct radeon_device *rdev, trinity_program_power_level() argument
716 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_program_power_level()
721 trinity_set_divider_value(rdev, index, pl->sclk); trinity_program_power_level()
722 trinity_set_vid(rdev, index, pl->vddc_index); trinity_program_power_level()
723 trinity_set_ss_dividers(rdev, index, pl->ss_divider_index); trinity_program_power_level()
724 trinity_set_ds_dividers(rdev, index, pl->ds_divider_index); trinity_program_power_level()
725 trinity_set_allos_gnb_slow(rdev, index, pl->allow_gnb_slow); trinity_program_power_level()
726 trinity_set_force_nbp_state(rdev, index, pl->force_nbp_state); trinity_program_power_level()
727 trinity_set_display_wm(rdev, index, pl->display_wm); trinity_program_power_level()
728 trinity_set_vce_wm(rdev, index, pl->vce_wm); trinity_program_power_level()
729 trinity_set_at(rdev, index, pi->at[index]); trinity_program_power_level()
732 static void trinity_power_level_enable_disable(struct radeon_device *rdev, trinity_power_level_enable_disable() argument
745 static bool trinity_dpm_enabled(struct radeon_device *rdev) trinity_dpm_enabled() argument
753 static void trinity_start_dpm(struct radeon_device *rdev) trinity_start_dpm() argument
764 trinity_dpm_config(rdev, true); trinity_start_dpm()
767 static void trinity_wait_for_dpm_enabled(struct radeon_device *rdev) trinity_wait_for_dpm_enabled() argument
771 for (i = 0; i < rdev->usec_timeout; i++) { trinity_wait_for_dpm_enabled()
776 for (i = 0; i < rdev->usec_timeout; i++) { trinity_wait_for_dpm_enabled()
781 for (i = 0; i < rdev->usec_timeout; i++) { trinity_wait_for_dpm_enabled()
788 static void trinity_stop_dpm(struct radeon_device *rdev) trinity_stop_dpm() argument
798 trinity_dpm_config(rdev, false); trinity_stop_dpm()
801 static void trinity_start_am(struct radeon_device *rdev) trinity_start_am() argument
806 static void trinity_reset_am(struct radeon_device *rdev) trinity_reset_am() argument
812 static void trinity_wait_for_level_0(struct radeon_device *rdev) trinity_wait_for_level_0() argument
816 for (i = 0; i < rdev->usec_timeout; i++) { trinity_wait_for_level_0()
823 static void trinity_enable_power_level_0(struct radeon_device *rdev) trinity_enable_power_level_0() argument
825 trinity_power_level_enable_disable(rdev, 0, true); trinity_enable_power_level_0()
828 static void trinity_force_level_0(struct radeon_device *rdev) trinity_force_level_0() argument
830 trinity_dpm_force_state(rdev, 0); trinity_force_level_0()
833 static void trinity_unforce_levels(struct radeon_device *rdev) trinity_unforce_levels() argument
835 trinity_dpm_no_forced_level(rdev); trinity_unforce_levels()
838 static void trinity_program_power_levels_0_to_n(struct radeon_device *rdev, trinity_program_power_levels_0_to_n() argument
848 trinity_program_power_level(rdev, &new_ps->levels[i], i); trinity_program_power_levels_0_to_n()
849 trinity_power_level_enable_disable(rdev, i, true); trinity_program_power_levels_0_to_n()
853 trinity_power_level_enable_disable(rdev, i, false); trinity_program_power_levels_0_to_n()
856 static void trinity_program_bootup_state(struct radeon_device *rdev) trinity_program_bootup_state() argument
858 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_program_bootup_state()
861 trinity_program_power_level(rdev, &pi->boot_pl, 0); trinity_program_bootup_state()
862 trinity_power_level_enable_disable(rdev, 0, true); trinity_program_bootup_state()
865 trinity_power_level_enable_disable(rdev, i, false); trinity_program_bootup_state()
868 static void trinity_setup_uvd_clock_table(struct radeon_device *rdev, trinity_setup_uvd_clock_table() argument
880 static void trinity_setup_uvd_dpm_interval(struct radeon_device *rdev, trinity_setup_uvd_dpm_interval() argument
886 u32 xclk = radeon_get_xclk(rdev); trinity_setup_uvd_dpm_interval()
920 static void trinity_setup_uvd_clocks(struct radeon_device *rdev, trinity_setup_uvd_clocks() argument
924 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_setup_uvd_clocks()
927 trinity_gfx_powergating_enable(rdev, false); trinity_setup_uvd_clocks()
933 trinity_setup_uvd_dpm_interval(rdev, 0); trinity_setup_uvd_clocks()
935 trinity_setup_uvd_clock_table(rdev, new_rps); trinity_setup_uvd_clocks()
942 radeon_set_uvd_clocks(rdev, new_rps->vclk, new_rps->dclk); trinity_setup_uvd_clocks()
944 trinity_setup_uvd_dpm_interval(rdev, 3000); trinity_setup_uvd_clocks()
947 trinity_uvd_dpm_config(rdev); trinity_setup_uvd_clocks()
953 radeon_set_uvd_clocks(rdev, new_rps->vclk, new_rps->dclk); trinity_setup_uvd_clocks()
957 trinity_gfx_powergating_enable(rdev, true); trinity_setup_uvd_clocks()
961 static void trinity_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev, trinity_set_uvd_clock_before_set_eng_clock() argument
972 trinity_setup_uvd_clocks(rdev, new_rps, old_rps); trinity_set_uvd_clock_before_set_eng_clock()
975 static void trinity_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev, trinity_set_uvd_clock_after_set_eng_clock() argument
986 trinity_setup_uvd_clocks(rdev, new_rps, old_rps); trinity_set_uvd_clock_after_set_eng_clock()
989 static void trinity_set_vce_clock(struct radeon_device *rdev, trinity_set_vce_clock() argument
997 vce_v1_0_enable_mgcg(rdev, false); trinity_set_vce_clock()
999 vce_v1_0_enable_mgcg(rdev, true); trinity_set_vce_clock()
1000 radeon_set_vce_clocks(rdev, new_rps->evclk, new_rps->ecclk); trinity_set_vce_clock()
1004 static void trinity_program_ttt(struct radeon_device *rdev) trinity_program_ttt() argument
1006 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_program_ttt()
1015 static void trinity_enable_att(struct radeon_device *rdev) trinity_enable_att() argument
1024 static void trinity_program_sclk_dpm(struct radeon_device *rdev) trinity_program_sclk_dpm() argument
1029 u32 xclk = radeon_get_xclk(rdev); trinity_program_sclk_dpm()
1042 static int trinity_set_thermal_temperature_range(struct radeon_device *rdev, trinity_set_thermal_temperature_range() argument
1060 rdev->pm.dpm.thermal.min_temp = low_temp; trinity_set_thermal_temperature_range()
1061 rdev->pm.dpm.thermal.max_temp = high_temp; trinity_set_thermal_temperature_range()
1066 static void trinity_update_current_ps(struct radeon_device *rdev, trinity_update_current_ps() argument
1070 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_update_current_ps()
1077 static void trinity_update_requested_ps(struct radeon_device *rdev, trinity_update_requested_ps() argument
1081 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_update_requested_ps()
1088 void trinity_dpm_enable_bapm(struct radeon_device *rdev, bool enable) trinity_dpm_enable_bapm() argument
1090 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_dpm_enable_bapm()
1093 trinity_acquire_mutex(rdev); trinity_dpm_enable_bapm()
1094 trinity_dpm_bapm_enable(rdev, enable); trinity_dpm_enable_bapm()
1095 trinity_release_mutex(rdev); trinity_dpm_enable_bapm()
1099 int trinity_dpm_enable(struct radeon_device *rdev) trinity_dpm_enable() argument
1101 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_dpm_enable()
1103 trinity_acquire_mutex(rdev); trinity_dpm_enable()
1105 if (trinity_dpm_enabled(rdev)) { trinity_dpm_enable()
1106 trinity_release_mutex(rdev); trinity_dpm_enable()
1110 trinity_program_bootup_state(rdev); trinity_dpm_enable()
1111 sumo_program_vc(rdev, 0x00C00033); trinity_dpm_enable()
1112 trinity_start_am(rdev); trinity_dpm_enable()
1114 trinity_program_ttt(rdev); trinity_dpm_enable()
1115 trinity_enable_att(rdev); trinity_dpm_enable()
1117 trinity_program_sclk_dpm(rdev); trinity_dpm_enable()
1118 trinity_start_dpm(rdev); trinity_dpm_enable()
1119 trinity_wait_for_dpm_enabled(rdev); trinity_dpm_enable()
1120 trinity_dpm_bapm_enable(rdev, false); trinity_dpm_enable()
1121 trinity_release_mutex(rdev); trinity_dpm_enable()
1123 trinity_update_current_ps(rdev, rdev->pm.dpm.boot_ps); trinity_dpm_enable()
1128 int trinity_dpm_late_enable(struct radeon_device *rdev) trinity_dpm_late_enable() argument
1132 trinity_acquire_mutex(rdev); trinity_dpm_late_enable()
1133 trinity_enable_clock_power_gating(rdev); trinity_dpm_late_enable()
1135 if (rdev->irq.installed && trinity_dpm_late_enable()
1136 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) { trinity_dpm_late_enable()
1137 ret = trinity_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX); trinity_dpm_late_enable()
1139 trinity_release_mutex(rdev); trinity_dpm_late_enable()
1142 rdev->irq.dpm_thermal = true; trinity_dpm_late_enable()
1143 radeon_irq_set(rdev); trinity_dpm_late_enable()
1145 trinity_release_mutex(rdev); trinity_dpm_late_enable()
1150 void trinity_dpm_disable(struct radeon_device *rdev) trinity_dpm_disable() argument
1152 trinity_acquire_mutex(rdev); trinity_dpm_disable()
1153 if (!trinity_dpm_enabled(rdev)) { trinity_dpm_disable()
1154 trinity_release_mutex(rdev); trinity_dpm_disable()
1157 trinity_dpm_bapm_enable(rdev, false); trinity_dpm_disable()
1158 trinity_disable_clock_power_gating(rdev); trinity_dpm_disable()
1159 sumo_clear_vc(rdev); trinity_dpm_disable()
1160 trinity_wait_for_level_0(rdev); trinity_dpm_disable()
1161 trinity_stop_dpm(rdev); trinity_dpm_disable()
1162 trinity_reset_am(rdev); trinity_dpm_disable()
1163 trinity_release_mutex(rdev); trinity_dpm_disable()
1165 if (rdev->irq.installed && trinity_dpm_disable()
1166 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) { trinity_dpm_disable()
1167 rdev->irq.dpm_thermal = false; trinity_dpm_disable()
1168 radeon_irq_set(rdev); trinity_dpm_disable()
1171 trinity_update_current_ps(rdev, rdev->pm.dpm.boot_ps); trinity_dpm_disable()
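
Unlike the sumo paths, every trinity entry point above brackets its work in trinity_acquire_mutex()/trinity_release_mutex(), and the early "already enabled"/"not enabled" returns release the lock before bailing out. A sketch of that discipline, using a plain pthread mutex as a stand-in for the SMU hardware mutex:

#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

/* Stub device: the real trinity code serializes all SMU access behind a
 * hardware mutex via trinity_acquire_mutex()/trinity_release_mutex(). */
struct dev { pthread_mutex_t smu_lock; bool dpm_enabled; };

/* Mirrors trinity_dpm_disable(): every exit path, including the early
 * "not enabled" return, must drop the lock taken on entry. */
static void dpm_disable_sketch(struct dev *d)
{
	pthread_mutex_lock(&d->smu_lock);
	if (!d->dpm_enabled) {
		pthread_mutex_unlock(&d->smu_lock);  /* early-out still unlocks */
		return;
	}
	d->dpm_enabled = false;                      /* the real stop sequence */
	pthread_mutex_unlock(&d->smu_lock);
}

int main(void)
{
	struct dev d = { PTHREAD_MUTEX_INITIALIZER, true };
	dpm_disable_sketch(&d);
	printf("enabled: %d\n", d.dpm_enabled);
	return 0;
}
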
1174 static void trinity_get_min_sclk_divider(struct radeon_device *rdev) trinity_get_min_sclk_divider() argument
1176 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_get_min_sclk_divider()
1182 static void trinity_setup_nbp_sim(struct radeon_device *rdev, trinity_setup_nbp_sim() argument
1185 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_setup_nbp_sim()
1200 int trinity_dpm_force_performance_level(struct radeon_device *rdev, trinity_dpm_force_performance_level() argument
1203 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_dpm_force_performance_level()
1215 ret = trinity_dpm_n_levels_disabled(rdev, ps->num_levels - 1); trinity_dpm_force_performance_level()
1220 ret = trinity_dpm_n_levels_disabled(rdev, 0); trinity_dpm_force_performance_level()
1226 rdev->pm.dpm.forced_level = level; trinity_dpm_force_performance_level()
1231 int trinity_dpm_pre_set_power_state(struct radeon_device *rdev) trinity_dpm_pre_set_power_state() argument
1233 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_dpm_pre_set_power_state()
1234 struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps; trinity_dpm_pre_set_power_state()
1237 trinity_update_requested_ps(rdev, new_ps); trinity_dpm_pre_set_power_state()
1239 trinity_apply_state_adjust_rules(rdev, trinity_dpm_pre_set_power_state()
1246 int trinity_dpm_set_power_state(struct radeon_device *rdev) trinity_dpm_set_power_state() argument
1248 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_dpm_set_power_state()
1252 trinity_acquire_mutex(rdev); trinity_dpm_set_power_state()
1255 trinity_dpm_bapm_enable(rdev, rdev->pm.dpm.ac_power); trinity_dpm_set_power_state()
1256 trinity_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps); trinity_dpm_set_power_state()
1257 trinity_enable_power_level_0(rdev); trinity_dpm_set_power_state()
1258 trinity_force_level_0(rdev); trinity_dpm_set_power_state()
1259 trinity_wait_for_level_0(rdev); trinity_dpm_set_power_state()
1260 trinity_setup_nbp_sim(rdev, new_ps); trinity_dpm_set_power_state()
1261 trinity_program_power_levels_0_to_n(rdev, new_ps, old_ps); trinity_dpm_set_power_state()
1262 trinity_force_level_0(rdev); trinity_dpm_set_power_state()
1263 trinity_unforce_levels(rdev); trinity_dpm_set_power_state()
1264 trinity_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps); trinity_dpm_set_power_state()
1265 trinity_set_vce_clock(rdev, new_ps, old_ps); trinity_dpm_set_power_state()
1267 trinity_release_mutex(rdev); trinity_dpm_set_power_state()
1272 void trinity_dpm_post_set_power_state(struct radeon_device *rdev) trinity_dpm_post_set_power_state() argument
1274 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_dpm_post_set_power_state()
1277 trinity_update_current_ps(rdev, new_ps); trinity_dpm_post_set_power_state()
1280 void trinity_dpm_setup_asic(struct radeon_device *rdev) trinity_dpm_setup_asic() argument
1282 trinity_acquire_mutex(rdev); trinity_dpm_setup_asic()
1283 sumo_program_sstp(rdev); trinity_dpm_setup_asic()
1284 sumo_take_smu_control(rdev, true); trinity_dpm_setup_asic()
1285 trinity_get_min_sclk_divider(rdev); trinity_dpm_setup_asic()
1286 trinity_release_mutex(rdev); trinity_dpm_setup_asic()
1290 void trinity_dpm_reset_asic(struct radeon_device *rdev)
1292 struct trinity_power_info *pi = trinity_get_pi(rdev);
1294 trinity_acquire_mutex(rdev);
1296 trinity_enable_power_level_0(rdev);
1297 trinity_force_level_0(rdev);
1298 trinity_wait_for_level_0(rdev);
1299 trinity_program_bootup_state(rdev);
1300 trinity_force_level_0(rdev);
1301 trinity_unforce_levels(rdev);
1303 trinity_release_mutex(rdev);
1307 static u16 trinity_convert_voltage_index_to_value(struct radeon_device *rdev, trinity_convert_voltage_index_to_value() argument
1310 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_convert_voltage_index_to_value()
1311 u32 vid_7bit = sumo_convert_vid2_to_vid7(rdev, &pi->sys_info.vid_mapping_table, vid_2bit); trinity_convert_voltage_index_to_value()
1322 static void trinity_patch_boot_state(struct radeon_device *rdev, trinity_patch_boot_state() argument
1325 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_patch_boot_state()
1333 static u8 trinity_calculate_vce_wm(struct radeon_device *rdev, u32 sclk) trinity_calculate_vce_wm() argument
1340 static void trinity_construct_boot_state(struct radeon_device *rdev) trinity_construct_boot_state() argument
1342 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_construct_boot_state()
1356 static u8 trinity_get_sleep_divider_id_from_clock(struct radeon_device *rdev, trinity_get_sleep_divider_id_from_clock() argument
1359 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_get_sleep_divider_id_from_clock()
1380 static u32 trinity_get_valid_engine_clock(struct radeon_device *rdev, trinity_get_valid_engine_clock() argument
1383 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_get_valid_engine_clock()
1397 static void trinity_patch_thermal_state(struct radeon_device *rdev, trinity_patch_thermal_state() argument
1401 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_patch_thermal_state()
1421 trinity_get_sleep_divider_id_from_clock(rdev, ps->levels[0].sclk, sclk_in_sr); trinity_patch_thermal_state()
1427 trinity_calculate_vce_wm(rdev, ps->levels[0].sclk); trinity_patch_thermal_state()
1430 static u8 trinity_calculate_display_wm(struct radeon_device *rdev, trinity_calculate_display_wm() argument
1450 static u32 trinity_get_uvd_clock_index(struct radeon_device *rdev, trinity_get_uvd_clock_index() argument
1453 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_get_uvd_clock_index()
1469 static void trinity_adjust_uvd_state(struct radeon_device *rdev, trinity_adjust_uvd_state() argument
1473 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_adjust_uvd_state()
1478 high_index = trinity_get_uvd_clock_index(rdev, rps); trinity_adjust_uvd_state()
1503 static int trinity_get_vce_clock_voltage(struct radeon_device *rdev, trinity_get_vce_clock_voltage() argument
1509 &rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table; trinity_get_vce_clock_voltage()
1533 static void trinity_apply_state_adjust_rules(struct radeon_device *rdev, trinity_apply_state_adjust_rules() argument
1539 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_apply_state_adjust_rules()
1546 u32 num_active_displays = rdev->pm.dpm.new_active_crtc_count; trinity_apply_state_adjust_rules()
1549 return trinity_patch_thermal_state(rdev, ps, current_ps); trinity_apply_state_adjust_rules()
1551 trinity_adjust_uvd_state(rdev, new_rps); trinity_apply_state_adjust_rules()
1554 new_rps->evclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].evclk; trinity_apply_state_adjust_rules()
1555 new_rps->ecclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].ecclk; trinity_apply_state_adjust_rules()
1567 trinity_get_valid_engine_clock(rdev, min_sclk); trinity_apply_state_adjust_rules()
1572 if (ps->levels[i].sclk < rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].sclk) trinity_apply_state_adjust_rules()
1573 ps->levels[i].sclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].sclk; trinity_apply_state_adjust_rules()
1575 trinity_get_vce_clock_voltage(rdev, new_rps->evclk, new_rps->ecclk, &min_vce_voltage); trinity_apply_state_adjust_rules()
1581 sumo_get_sleep_divider_id_from_clock(rdev, ps->levels[i].sclk, sclk_in_sr); trinity_apply_state_adjust_rules()
1588 trinity_calculate_display_wm(rdev, ps, i); trinity_apply_state_adjust_rules()
1590 trinity_calculate_vce_wm(rdev, ps->levels[0].sclk); trinity_apply_state_adjust_rules()
1618 static void trinity_cleanup_asic(struct radeon_device *rdev) trinity_cleanup_asic() argument
1620 sumo_take_smu_control(rdev, false); trinity_cleanup_asic()
1624 static void trinity_pre_display_configuration_change(struct radeon_device *rdev)
1626 struct trinity_power_info *pi = trinity_get_pi(rdev);
1629 trinity_dce_enable_voltage_adjustment(rdev, false);
1633 static void trinity_add_dccac_value(struct radeon_device *rdev) trinity_add_dccac_value() argument
1636 u32 num_active_displays = rdev->pm.dpm.new_active_crtc_count; trinity_add_dccac_value()
1637 u64 disp_clk = rdev->clock.default_dispclk / 100; trinity_add_dccac_value()
1649 void trinity_dpm_display_configuration_changed(struct radeon_device *rdev) trinity_dpm_display_configuration_changed() argument
1651 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_dpm_display_configuration_changed()
1654 trinity_dce_enable_voltage_adjustment(rdev, true); trinity_dpm_display_configuration_changed()
1655 trinity_add_dccac_value(rdev); trinity_dpm_display_configuration_changed()
1679 static void trinity_parse_pplib_non_clock_info(struct radeon_device *rdev, trinity_parse_pplib_non_clock_info() argument
1699 rdev->pm.dpm.boot_ps = rps; trinity_parse_pplib_non_clock_info()
1700 trinity_patch_boot_state(rdev, ps); trinity_parse_pplib_non_clock_info()
1703 rdev->pm.dpm.uvd_ps = rps; trinity_parse_pplib_non_clock_info()
1706 static void trinity_parse_pplib_clock_info(struct radeon_device *rdev, trinity_parse_pplib_clock_info() argument
1710 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_parse_pplib_clock_info()
1728 static int trinity_parse_power_table(struct radeon_device *rdev) trinity_parse_power_table() argument
1730 struct radeon_mode_info *mode_info = &rdev->mode_info; trinity_parse_power_table()
1760 rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) * trinity_parse_power_table()
1762 if (!rdev->pm.dpm.ps) trinity_parse_power_table()
1771 if (!rdev->pm.power_state[i].clock_info) trinity_parse_power_table()
1775 kfree(rdev->pm.dpm.ps); trinity_parse_power_table()
1778 rdev->pm.dpm.ps[i].ps_priv = ps; trinity_parse_power_table()
1790 trinity_parse_pplib_clock_info(rdev, trinity_parse_power_table()
1791 &rdev->pm.dpm.ps[i], k, trinity_parse_power_table()
1795 trinity_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], trinity_parse_power_table()
1800 rdev->pm.dpm.num_ps = state_array->ucNumEntries; trinity_parse_power_table()
1805 clock_array_index = rdev->pm.dpm.vce_states[i].clk_idx; trinity_parse_power_table()
1810 rdev->pm.dpm.vce_states[i].sclk = sclk; trinity_parse_power_table()
1811 rdev->pm.dpm.vce_states[i].mclk = 0; trinity_parse_power_table()
1825 static u32 trinity_convert_did_to_freq(struct radeon_device *rdev, u8 did) trinity_convert_did_to_freq() argument
1827 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_convert_did_to_freq()
1844 static int trinity_parse_sys_info_table(struct radeon_device *rdev) trinity_parse_sys_info_table() argument
1846 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_parse_sys_info_table()
1847 struct radeon_mode_info *mode_info = &rdev->mode_info; trinity_parse_sys_info_table()
1906 sumo_construct_sclk_voltage_mapping_table(rdev, trinity_parse_sys_info_table()
1909 sumo_construct_vid_mapping_table(rdev, &pi->sys_info.vid_mapping_table, trinity_parse_sys_info_table()
1932 trinity_convert_did_to_freq(rdev, trinity_parse_sys_info_table()
1935 trinity_convert_did_to_freq(rdev, trinity_parse_sys_info_table()
1945 int trinity_dpm_init(struct radeon_device *rdev) trinity_dpm_init() argument
1953 rdev->pm.dpm.priv = pi; trinity_dpm_init()
1965 if (rdev->pdev->subsystem_vendor == 0x1462) trinity_dpm_init()
1985 ret = trinity_parse_sys_info_table(rdev); trinity_dpm_init()
1989 trinity_construct_boot_state(rdev); trinity_dpm_init()
1991 ret = r600_get_platform_caps(rdev); trinity_dpm_init()
1995 ret = r600_parse_extended_power_table(rdev); trinity_dpm_init()
1999 ret = trinity_parse_power_table(rdev); trinity_dpm_init()
2009 void trinity_dpm_print_power_state(struct radeon_device *rdev, trinity_dpm_print_power_state() argument
2022 trinity_convert_voltage_index_to_value(rdev, pl->vddc_index)); trinity_dpm_print_power_state()
2024 r600_dpm_print_ps_status(rdev, rps); trinity_dpm_print_power_state()
2027 void trinity_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev, trinity_dpm_debugfs_print_current_performance_level() argument
2030 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_dpm_debugfs_print_current_performance_level()
2045 trinity_convert_voltage_index_to_value(rdev, pl->vddc_index)); trinity_dpm_debugfs_print_current_performance_level()
2049 u32 trinity_dpm_get_current_sclk(struct radeon_device *rdev) trinity_dpm_get_current_sclk() argument
2051 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_dpm_get_current_sclk()
2067 u32 trinity_dpm_get_current_mclk(struct radeon_device *rdev) trinity_dpm_get_current_mclk() argument
2069 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_dpm_get_current_mclk()
2074 void trinity_dpm_fini(struct radeon_device *rdev) trinity_dpm_fini() argument
2078 trinity_cleanup_asic(rdev); /* ??? */ trinity_dpm_fini()
2080 for (i = 0; i < rdev->pm.dpm.num_ps; i++) { trinity_dpm_fini()
2081 kfree(rdev->pm.dpm.ps[i].ps_priv); trinity_dpm_fini()
2083 kfree(rdev->pm.dpm.ps); trinity_dpm_fini()
2084 kfree(rdev->pm.dpm.priv); trinity_dpm_fini()
2085 r600_free_extended_power_table(rdev); trinity_dpm_fini()
2088 u32 trinity_dpm_get_sclk(struct radeon_device *rdev, bool low) trinity_dpm_get_sclk() argument
2090 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_dpm_get_sclk()
2099 u32 trinity_dpm_get_mclk(struct radeon_device *rdev, bool low) trinity_dpm_get_mclk() argument
2101 struct trinity_power_info *pi = trinity_get_pi(rdev); trinity_dpm_get_mclk()
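
The pre/set/post triple visible in both sumo and trinity (*_dpm_pre_set_power_state, *_dpm_set_power_state, *_dpm_post_set_power_state) copies the requested state, lets *_apply_state_adjust_rules() edit the copy, programs the hardware from it, and only then promotes it to current. A compressed sketch under those assumptions, with apply_adjust_rules() as an invented placeholder for the real clamping:

#include <stdio.h>

struct radeon_ps { unsigned sclk_khz; };

struct dpm_ctx {
	struct radeon_ps current_ps;    /* what the hardware runs now */
	struct radeon_ps requested_ps;  /* private, adjustable copy */
};

/* Hypothetical stand-in for *_apply_state_adjust_rules(): clamp the
 * requested clock so the copy, never the caller's table entry, is edited. */
static void apply_adjust_rules(struct dpm_ctx *ctx)
{
	if (ctx->requested_ps.sclk_khz < 200000)
		ctx->requested_ps.sclk_khz = 200000;
}

/* Phase 1: snapshot the request and massage the snapshot. */
static void dpm_pre_set(struct dpm_ctx *ctx, const struct radeon_ps *new_ps)
{
	ctx->requested_ps = *new_ps;  /* copy, as in trinity_dpm_pre_set_power_state() */
	apply_adjust_rules(ctx);
}

/* Phase 2: program the hardware from the adjusted copy (elided here). */
static void dpm_set(struct dpm_ctx *ctx) { (void)ctx; }

/* Phase 3: only after programming succeeds does requested become current. */
static void dpm_post_set(struct dpm_ctx *ctx)
{
	ctx->current_ps = ctx->requested_ps;
}

int main(void)
{
	struct dpm_ctx ctx = { { 100000 }, { 0 } };
	struct radeon_ps want = { 150000 };

	dpm_pre_set(&ctx, &want);
	dpm_set(&ctx);
	dpm_post_set(&ctx);
	printf("current sclk: %u kHz\n", ctx.current_ps.sclk_khz);
	return 0;
}
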
H A Dcypress_dpm.c46 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
47 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
49 static void cypress_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev, cypress_enable_bif_dynamic_pcie_gen2() argument
52 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_enable_bif_dynamic_pcie_gen2()
86 static void cypress_enable_dynamic_pcie_gen2(struct radeon_device *rdev, cypress_enable_dynamic_pcie_gen2() argument
89 cypress_enable_bif_dynamic_pcie_gen2(rdev, enable); cypress_enable_dynamic_pcie_gen2()
98 static int cypress_enter_ulp_state(struct radeon_device *rdev)
100 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
119 static void cypress_gfx_clock_gating_enable(struct radeon_device *rdev, cypress_gfx_clock_gating_enable() argument
122 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_gfx_clock_gating_enable()
171 static void cypress_mg_clock_gating_enable(struct radeon_device *rdev, cypress_mg_clock_gating_enable() argument
174 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_mg_clock_gating_enable()
175 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_mg_clock_gating_enable()
180 if (rdev->family == CHIP_CEDAR) cypress_mg_clock_gating_enable()
182 else if (rdev->family == CHIP_REDWOOD) cypress_mg_clock_gating_enable()
220 void cypress_enable_spread_spectrum(struct radeon_device *rdev, cypress_enable_spread_spectrum() argument
223 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_enable_spread_spectrum()
239 void cypress_start_dpm(struct radeon_device *rdev) cypress_start_dpm() argument
244 void cypress_enable_sclk_control(struct radeon_device *rdev, cypress_enable_sclk_control() argument
253 void cypress_enable_mclk_control(struct radeon_device *rdev, cypress_enable_mclk_control() argument
262 int cypress_notify_smc_display_change(struct radeon_device *rdev, cypress_notify_smc_display_change() argument
268 if (rv770_send_msg_to_smc(rdev, msg) != PPSMC_Result_OK) cypress_notify_smc_display_change()
274 void cypress_program_response_times(struct radeon_device *rdev) cypress_program_response_times() argument
279 reference_clock = radeon_get_xclk(rdev); cypress_program_response_times()
282 rv770_write_smc_soft_register(rdev, cypress_program_response_times()
286 rv770_write_smc_soft_register(rdev, cypress_program_response_times()
289 rv770_write_smc_soft_register(rdev, cypress_program_response_times()
292 rv770_program_response_times(rdev); cypress_program_response_times()
294 if (ASIC_IS_LOMBOK(rdev)) cypress_program_response_times()
295 rv770_write_smc_soft_register(rdev, cypress_program_response_times()
300 static int cypress_pcie_performance_request(struct radeon_device *rdev, cypress_pcie_performance_request() argument
304 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_pcie_performance_request()
317 return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise); cypress_pcie_performance_request()
321 return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise); cypress_pcie_performance_request()
328 void cypress_advertise_gen2_capability(struct radeon_device *rdev) cypress_advertise_gen2_capability() argument
330 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_advertise_gen2_capability()
334 radeon_acpi_pcie_notify_device_ready(rdev); cypress_advertise_gen2_capability()
346 cypress_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true); cypress_advertise_gen2_capability()
359 void cypress_notify_link_speed_change_after_state_change(struct radeon_device *rdev, cypress_notify_link_speed_change_after_state_change() argument
377 cypress_pcie_performance_request(rdev, request, false); cypress_notify_link_speed_change_after_state_change()
381 void cypress_notify_link_speed_change_before_state_change(struct radeon_device *rdev, cypress_notify_link_speed_change_before_state_change() argument
399 cypress_pcie_performance_request(rdev, request, false); cypress_notify_link_speed_change_before_state_change()
403 static int cypress_populate_voltage_value(struct radeon_device *rdev, cypress_populate_voltage_value() argument
423 u8 cypress_get_strobe_mode_settings(struct radeon_device *rdev, u32 mclk) cypress_get_strobe_mode_settings() argument
425 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_get_strobe_mode_settings()
432 result = cypress_get_mclk_frequency_ratio(rdev, mclk, strobe_mode); cypress_get_strobe_mode_settings()
441 u32 cypress_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf) cypress_map_clkf_to_ibias() argument
443 u32 ref_clk = rdev->clock.mpll.reference_freq; cypress_map_clkf_to_ibias()
473 static int cypress_populate_mclk_value(struct radeon_device *rdev, cypress_populate_mclk_value() argument
478 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_populate_mclk_value()
500 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM, cypress_populate_mclk_value()
512 ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div); cypress_populate_mclk_value()
557 if (radeon_atombios_get_asic_ss_info(rdev, &ss, cypress_populate_mclk_value()
559 u32 reference_clock = rdev->clock.mpll.reference_freq; cypress_populate_mclk_value()
610 u8 cypress_get_mclk_frequency_ratio(struct radeon_device *rdev, cypress_get_mclk_frequency_ratio() argument
615 if (rdev->family >= CHIP_BARTS) { cypress_get_mclk_frequency_ratio()
651 static int cypress_populate_mvdd_value(struct radeon_device *rdev, cypress_populate_mvdd_value() argument
655 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_populate_mvdd_value()
656 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_populate_mvdd_value()
675 int cypress_convert_power_level_to_smc(struct radeon_device *rdev, cypress_convert_power_level_to_smc() argument
680 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_convert_power_level_to_smc()
681 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_convert_power_level_to_smc()
691 ret = rv740_populate_sclk_value(rdev, pl->sclk, &level->sclk); cypress_convert_power_level_to_smc()
713 level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk); cypress_convert_power_level_to_smc()
716 if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >= cypress_convert_power_level_to_smc()
724 ret = cypress_populate_mclk_value(rdev, cypress_convert_power_level_to_smc()
731 ret = cypress_populate_mclk_value(rdev, cypress_convert_power_level_to_smc()
741 ret = cypress_populate_voltage_value(rdev, cypress_convert_power_level_to_smc()
749 ret = cypress_populate_voltage_value(rdev, cypress_convert_power_level_to_smc()
757 ret = cypress_populate_mvdd_value(rdev, pl->mclk, &level->mvdd); cypress_convert_power_level_to_smc()
762 static int cypress_convert_power_state_to_smc(struct radeon_device *rdev, cypress_convert_power_state_to_smc() argument
767 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_convert_power_state_to_smc()
773 ret = cypress_convert_power_level_to_smc(rdev, cypress_convert_power_state_to_smc()
780 ret = cypress_convert_power_level_to_smc(rdev, cypress_convert_power_state_to_smc()
787 ret = cypress_convert_power_level_to_smc(rdev, cypress_convert_power_state_to_smc()
808 rv770_populate_smc_sp(rdev, radeon_state, smc_state); cypress_convert_power_state_to_smc()
810 return rv770_populate_smc_t(rdev, radeon_state, smc_state); cypress_convert_power_state_to_smc()
827 static void cypress_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev, cypress_convert_mc_reg_table_entry_to_smc() argument
831 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_convert_mc_reg_table_entry_to_smc()
849 static void cypress_convert_mc_reg_table_to_smc(struct radeon_device *rdev, cypress_convert_mc_reg_table_to_smc() argument
855 cypress_convert_mc_reg_table_entry_to_smc(rdev, cypress_convert_mc_reg_table_to_smc()
858 cypress_convert_mc_reg_table_entry_to_smc(rdev, cypress_convert_mc_reg_table_to_smc()
861 cypress_convert_mc_reg_table_entry_to_smc(rdev, cypress_convert_mc_reg_table_to_smc()
866 int cypress_upload_sw_state(struct radeon_device *rdev, cypress_upload_sw_state() argument
869 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_upload_sw_state()
875 ret = cypress_convert_power_state_to_smc(rdev, radeon_new_state, &state); cypress_upload_sw_state()
879 return rv770_copy_bytes_to_smc(rdev, address, (u8 *)&state, cypress_upload_sw_state()
884 int cypress_upload_mc_reg_table(struct radeon_device *rdev, cypress_upload_mc_reg_table() argument
887 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_upload_mc_reg_table()
888 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_upload_mc_reg_table()
892 cypress_convert_mc_reg_table_to_smc(rdev, radeon_new_state, &mc_reg_table); cypress_upload_mc_reg_table()
897 return rv770_copy_bytes_to_smc(rdev, address, cypress_upload_mc_reg_table()
903 u32 cypress_calculate_burst_time(struct radeon_device *rdev, cypress_calculate_burst_time() argument
906 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_calculate_burst_time()
924 void cypress_program_memory_timing_parameters(struct radeon_device *rdev, cypress_program_memory_timing_parameters() argument
932 mc_arb_burst_time |= STATE1(cypress_calculate_burst_time(rdev, cypress_program_memory_timing_parameters()
935 mc_arb_burst_time |= STATE2(cypress_calculate_burst_time(rdev, cypress_program_memory_timing_parameters()
938 mc_arb_burst_time |= STATE3(cypress_calculate_burst_time(rdev, cypress_program_memory_timing_parameters()
942 rv730_program_memory_timing_parameters(rdev, radeon_new_state); cypress_program_memory_timing_parameters()
947 static void cypress_populate_mc_reg_addresses(struct radeon_device *rdev, cypress_populate_mc_reg_addresses() argument
950 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_populate_mc_reg_addresses()
966 static void cypress_set_mc_reg_address_table(struct radeon_device *rdev) cypress_set_mc_reg_address_table() argument
968 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_set_mc_reg_address_table()
1030 static void cypress_retrieve_ac_timing_for_one_entry(struct radeon_device *rdev, cypress_retrieve_ac_timing_for_one_entry() argument
1033 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_retrieve_ac_timing_for_one_entry()
1042 static void cypress_retrieve_ac_timing_for_all_ranges(struct radeon_device *rdev, cypress_retrieve_ac_timing_for_all_ranges() argument
1045 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_retrieve_ac_timing_for_all_ranges()
1051 radeon_atom_set_ac_timing(rdev, range_table->mclk[i]); cypress_retrieve_ac_timing_for_all_ranges()
1052 cypress_retrieve_ac_timing_for_one_entry(rdev, cypress_retrieve_ac_timing_for_all_ranges()
1070 static int cypress_initialize_mc_reg_table(struct radeon_device *rdev) cypress_initialize_mc_reg_table() argument
1072 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_initialize_mc_reg_table()
1073 u8 module_index = rv770_get_memory_module_index(rdev); cypress_initialize_mc_reg_table()
1077 ret = radeon_atom_get_mclk_range_table(rdev, cypress_initialize_mc_reg_table()
1083 cypress_retrieve_ac_timing_for_all_ranges(rdev, &range_table); cypress_initialize_mc_reg_table()
1088 static void cypress_wait_for_mc_sequencer(struct radeon_device *rdev, u8 value) cypress_wait_for_mc_sequencer() argument
1093 if ((rdev->family == CHIP_CYPRESS) || cypress_wait_for_mc_sequencer()
1094 (rdev->family == CHIP_HEMLOCK)) cypress_wait_for_mc_sequencer()
1096 else if (rdev->family == CHIP_CEDAR) cypress_wait_for_mc_sequencer()
1100 if ((rdev->family == CHIP_CYPRESS) || cypress_wait_for_mc_sequencer()
1101 (rdev->family == CHIP_HEMLOCK)) { cypress_wait_for_mc_sequencer()
1108 for (j = 0; j < rdev->usec_timeout; j++) { cypress_wait_for_mc_sequencer()
1116 static void cypress_force_mc_use_s1(struct radeon_device *rdev, cypress_force_mc_use_s1() argument
1127 radeon_atom_set_ac_timing(rdev, boot_state->low.mclk); cypress_force_mc_use_s1()
1128 radeon_mc_wait_for_idle(rdev); cypress_force_mc_use_s1()
1130 if ((rdev->family == CHIP_CYPRESS) || cypress_force_mc_use_s1()
1131 (rdev->family == CHIP_HEMLOCK)) { cypress_force_mc_use_s1()
1139 for (i = 0; i < rdev->num_crtc; i++) cypress_force_mc_use_s1()
1140 radeon_wait_for_vblank(rdev, i); cypress_force_mc_use_s1()
1143 cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_SUSPEND); cypress_force_mc_use_s1()
1145 strobe_mode = cypress_get_strobe_mode_settings(rdev, cypress_force_mc_use_s1()
1152 for (i = 0; i < rdev->usec_timeout; i++) { cypress_force_mc_use_s1()
1162 cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_RESUME); cypress_force_mc_use_s1()
1165 static void cypress_copy_ac_timing_from_s1_to_s0(struct radeon_device *rdev) cypress_copy_ac_timing_from_s1_to_s0() argument
1167 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_copy_ac_timing_from_s1_to_s0()
1177 static void cypress_force_mc_use_s0(struct radeon_device *rdev, cypress_force_mc_use_s0() argument
1185 cypress_copy_ac_timing_from_s1_to_s0(rdev); cypress_force_mc_use_s0()
1186 radeon_mc_wait_for_idle(rdev); cypress_force_mc_use_s0()
1188 if ((rdev->family == CHIP_CYPRESS) || cypress_force_mc_use_s0()
1189 (rdev->family == CHIP_HEMLOCK)) { cypress_force_mc_use_s0()
1197 for (i = 0; i < rdev->num_crtc; i++) cypress_force_mc_use_s0()
1198 radeon_wait_for_vblank(rdev, i); cypress_force_mc_use_s0()
1201 cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_SUSPEND); cypress_force_mc_use_s0()
1203 strobe_mode = cypress_get_strobe_mode_settings(rdev, cypress_force_mc_use_s0()
1210 for (i = 0; i < rdev->usec_timeout; i++) { cypress_force_mc_use_s0()
1220 cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_RESUME); cypress_force_mc_use_s0()
1223 static int cypress_populate_initial_mvdd_value(struct radeon_device *rdev, cypress_populate_initial_mvdd_value() argument
1226 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_populate_initial_mvdd_value()
1234 int cypress_populate_smc_initial_state(struct radeon_device *rdev, cypress_populate_smc_initial_state() argument
1239 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_populate_smc_initial_state()
1240 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_populate_smc_initial_state()
1282 cypress_populate_voltage_value(rdev, cypress_populate_smc_initial_state()
1288 cypress_populate_voltage_value(rdev, cypress_populate_smc_initial_state()
1293 cypress_populate_initial_mvdd_value(rdev, cypress_populate_smc_initial_state()
1313 cypress_get_strobe_mode_settings(rdev, cypress_populate_smc_initial_state()
1330 int cypress_populate_smc_acpi_state(struct radeon_device *rdev, cypress_populate_smc_acpi_state() argument
1333 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_populate_smc_acpi_state()
1334 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_populate_smc_acpi_state()
1359 cypress_populate_voltage_value(rdev, cypress_populate_smc_acpi_state()
1375 cypress_populate_voltage_value(rdev, cypress_populate_smc_acpi_state()
1384 cypress_populate_voltage_value(rdev, cypress_populate_smc_acpi_state()
1427 if (rdev->family <= CHIP_HEMLOCK) cypress_populate_smc_acpi_state()
1456 cypress_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd); cypress_populate_smc_acpi_state()
1467 static void cypress_trim_voltage_table_to_fit_state_table(struct radeon_device *rdev, cypress_trim_voltage_table_to_fit_state_table() argument
1483 int cypress_construct_voltage_tables(struct radeon_device *rdev) cypress_construct_voltage_tables() argument
1485 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_construct_voltage_tables()
1488 ret = radeon_atom_get_voltage_table(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0, cypress_construct_voltage_tables()
1494 cypress_trim_voltage_table_to_fit_state_table(rdev, cypress_construct_voltage_tables()
1498 ret = radeon_atom_get_voltage_table(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0, cypress_construct_voltage_tables()
1504 cypress_trim_voltage_table_to_fit_state_table(rdev, cypress_construct_voltage_tables()
1511 static void cypress_populate_smc_voltage_table(struct radeon_device *rdev, cypress_populate_smc_voltage_table() argument
1523 int cypress_populate_smc_voltage_tables(struct radeon_device *rdev, cypress_populate_smc_voltage_tables() argument
1526 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_populate_smc_voltage_tables()
1527 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_populate_smc_voltage_tables()
1531 cypress_populate_smc_voltage_table(rdev, cypress_populate_smc_voltage_tables()
1549 cypress_populate_smc_voltage_table(rdev, cypress_populate_smc_voltage_tables()
1570 int cypress_get_mvdd_configuration(struct radeon_device *rdev) cypress_get_mvdd_configuration() argument
1572 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_get_mvdd_configuration()
1573 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_get_mvdd_configuration()
1593 module_index = rv770_get_memory_module_index(rdev); cypress_get_mvdd_configuration()
1595 if (radeon_atom_get_memory_info(rdev, module_index, &memory_info)) { cypress_get_mvdd_configuration()
1611 static int cypress_init_smc_table(struct radeon_device *rdev, cypress_init_smc_table() argument
1614 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_init_smc_table()
1620 cypress_populate_smc_voltage_tables(rdev, table); cypress_init_smc_table()
1622 switch (rdev->pm.int_thermal_type) { cypress_init_smc_table()
1635 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC) cypress_init_smc_table()
1638 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT) cypress_init_smc_table()
1641 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC) cypress_init_smc_table()
1647 ret = cypress_populate_smc_initial_state(rdev, radeon_boot_state, table); cypress_init_smc_table()
1651 ret = cypress_populate_smc_acpi_state(rdev, table); cypress_init_smc_table()
1657 return rv770_copy_bytes_to_smc(rdev, cypress_init_smc_table()
1663 int cypress_populate_mc_reg_table(struct radeon_device *rdev, cypress_populate_mc_reg_table() argument
1666 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_populate_mc_reg_table()
1667 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_populate_mc_reg_table()
1671 rv770_write_smc_soft_register(rdev, cypress_populate_mc_reg_table()
1674 cypress_populate_mc_reg_addresses(rdev, &mc_reg_table); cypress_populate_mc_reg_table()
1676 cypress_convert_mc_reg_table_entry_to_smc(rdev, cypress_populate_mc_reg_table()
1684 cypress_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, &mc_reg_table); cypress_populate_mc_reg_table()
1686 return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start, cypress_populate_mc_reg_table()
1691 int cypress_get_table_locations(struct radeon_device *rdev) cypress_get_table_locations() argument
1693 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_get_table_locations()
1694 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_get_table_locations()
1698 ret = rv770_read_smc_sram_dword(rdev, cypress_get_table_locations()
1707 ret = rv770_read_smc_sram_dword(rdev, cypress_get_table_locations()
1716 ret = rv770_read_smc_sram_dword(rdev, cypress_get_table_locations()
1728 void cypress_enable_display_gap(struct radeon_device *rdev) cypress_enable_display_gap() argument
1742 static void cypress_program_display_gap(struct radeon_device *rdev) cypress_program_display_gap() argument
1748 if (rdev->pm.dpm.new_active_crtc_count > 0) cypress_program_display_gap()
1753 if (rdev->pm.dpm.new_active_crtc_count > 1) cypress_program_display_gap()
1763 if ((rdev->pm.dpm.new_active_crtc_count > 0) && cypress_program_display_gap()
1764 (!(rdev->pm.dpm.new_active_crtcs & (1 << pipe)))) { cypress_program_display_gap()
1766 for (i = 0; i < rdev->num_crtc; i++) { cypress_program_display_gap()
1767 if (rdev->pm.dpm.new_active_crtcs & (1 << i)) cypress_program_display_gap()
1770 if (i == rdev->num_crtc) cypress_program_display_gap()
1780 cypress_notify_smc_display_change(rdev, rdev->pm.dpm.new_active_crtc_count > 0); cypress_program_display_gap()
1783 void cypress_dpm_setup_asic(struct radeon_device *rdev) cypress_dpm_setup_asic() argument
1785 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_dpm_setup_asic()
1787 rv740_read_clock_registers(rdev); cypress_dpm_setup_asic()
1788 rv770_read_voltage_smio_registers(rdev); cypress_dpm_setup_asic()
1789 rv770_get_max_vddc(rdev); cypress_dpm_setup_asic()
1790 rv770_get_memory_type(rdev); cypress_dpm_setup_asic()
1796 cypress_advertise_gen2_capability(rdev); cypress_dpm_setup_asic()
1798 rv770_get_pcie_gen2_status(rdev); cypress_dpm_setup_asic()
1800 rv770_enable_acpi_pm(rdev); cypress_dpm_setup_asic()
1803 int cypress_dpm_enable(struct radeon_device *rdev) cypress_dpm_enable() argument
1805 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_dpm_enable()
1806 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_dpm_enable()
1807 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps; cypress_dpm_enable()
1811 rv770_restore_cgcg(rdev); cypress_dpm_enable()
1813 if (rv770_dpm_enabled(rdev)) cypress_dpm_enable()
1817 rv770_enable_voltage_control(rdev, true); cypress_dpm_enable()
1818 ret = cypress_construct_voltage_tables(rdev); cypress_dpm_enable()
1826 ret = cypress_get_mvdd_configuration(rdev); cypress_dpm_enable()
1834 cypress_set_mc_reg_address_table(rdev); cypress_dpm_enable()
1835 cypress_force_mc_use_s0(rdev, boot_ps); cypress_dpm_enable()
1836 ret = cypress_initialize_mc_reg_table(rdev); cypress_dpm_enable()
1839 cypress_force_mc_use_s1(rdev, boot_ps); cypress_dpm_enable()
1842 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS) cypress_dpm_enable()
1843 rv770_enable_backbias(rdev, true); cypress_dpm_enable()
1846 cypress_enable_spread_spectrum(rdev, true); cypress_dpm_enable()
1849 rv770_enable_thermal_protection(rdev, true); cypress_dpm_enable()
1851 rv770_setup_bsp(rdev); cypress_dpm_enable()
1852 rv770_program_git(rdev); cypress_dpm_enable()
1853 rv770_program_tp(rdev); cypress_dpm_enable()
1854 rv770_program_tpp(rdev); cypress_dpm_enable()
1855 rv770_program_sstp(rdev); cypress_dpm_enable()
1856 rv770_program_engine_speed_parameters(rdev); cypress_dpm_enable()
1857 cypress_enable_display_gap(rdev); cypress_dpm_enable()
1858 rv770_program_vc(rdev); cypress_dpm_enable()
1861 cypress_enable_dynamic_pcie_gen2(rdev, true); cypress_dpm_enable()
1863 ret = rv770_upload_firmware(rdev); cypress_dpm_enable()
1869 ret = cypress_get_table_locations(rdev); cypress_dpm_enable()
1874 ret = cypress_init_smc_table(rdev, boot_ps); cypress_dpm_enable()
1880 ret = cypress_populate_mc_reg_table(rdev, boot_ps); cypress_dpm_enable()
1887 cypress_program_response_times(rdev); cypress_dpm_enable()
1889 r7xx_start_smc(rdev); cypress_dpm_enable()
1891 ret = cypress_notify_smc_display_change(rdev, false); cypress_dpm_enable()
1896 cypress_enable_sclk_control(rdev, true); cypress_dpm_enable()
1899 cypress_enable_mclk_control(rdev, true); cypress_dpm_enable()
1901 cypress_start_dpm(rdev); cypress_dpm_enable()
1904 cypress_gfx_clock_gating_enable(rdev, true); cypress_dpm_enable()
1907 cypress_mg_clock_gating_enable(rdev, true); cypress_dpm_enable()
1909 rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true); cypress_dpm_enable()
1914 void cypress_dpm_disable(struct radeon_device *rdev) cypress_dpm_disable() argument
1916 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_dpm_disable()
1917 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_dpm_disable()
1918 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps; cypress_dpm_disable()
1920 if (!rv770_dpm_enabled(rdev)) cypress_dpm_disable()
1923 rv770_clear_vc(rdev); cypress_dpm_disable()
1926 rv770_enable_thermal_protection(rdev, false); cypress_dpm_disable()
1929 cypress_enable_dynamic_pcie_gen2(rdev, false); cypress_dpm_disable()
1931 if (rdev->irq.installed && cypress_dpm_disable()
1932 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) { cypress_dpm_disable()
1933 rdev->irq.dpm_thermal = false; cypress_dpm_disable()
1934 radeon_irq_set(rdev); cypress_dpm_disable()
1938 cypress_gfx_clock_gating_enable(rdev, false); cypress_dpm_disable()
1941 cypress_mg_clock_gating_enable(rdev, false); cypress_dpm_disable()
1943 rv770_stop_dpm(rdev); cypress_dpm_disable()
1944 r7xx_stop_smc(rdev); cypress_dpm_disable()
1946 cypress_enable_spread_spectrum(rdev, false); cypress_dpm_disable()
1949 cypress_force_mc_use_s1(rdev, boot_ps); cypress_dpm_disable()
1951 rv770_reset_smio_status(rdev); cypress_dpm_disable()
1954 int cypress_dpm_set_power_state(struct radeon_device *rdev) cypress_dpm_set_power_state() argument
1956 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); cypress_dpm_set_power_state()
1957 struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps; cypress_dpm_set_power_state()
1958 struct radeon_ps *old_ps = rdev->pm.dpm.current_ps; cypress_dpm_set_power_state()
1961 ret = rv770_restrict_performance_levels_before_switch(rdev); cypress_dpm_set_power_state()
1967 cypress_notify_link_speed_change_before_state_change(rdev, new_ps, old_ps); cypress_dpm_set_power_state()
1969 rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps); cypress_dpm_set_power_state()
1970 ret = rv770_halt_smc(rdev); cypress_dpm_set_power_state()
1975 ret = cypress_upload_sw_state(rdev, new_ps); cypress_dpm_set_power_state()
1981 ret = cypress_upload_mc_reg_table(rdev, new_ps); cypress_dpm_set_power_state()
1988 cypress_program_memory_timing_parameters(rdev, new_ps); cypress_dpm_set_power_state()
1990 ret = rv770_resume_smc(rdev); cypress_dpm_set_power_state()
1995 ret = rv770_set_sw_state(rdev); cypress_dpm_set_power_state()
2000 rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps); cypress_dpm_set_power_state()
2003 cypress_notify_link_speed_change_after_state_change(rdev, new_ps, old_ps); cypress_dpm_set_power_state()
2009 void cypress_dpm_reset_asic(struct radeon_device *rdev)
2011 rv770_restrict_performance_levels_before_switch(rdev);
2012 rv770_set_boot_state(rdev);
2016 void cypress_dpm_display_configuration_changed(struct radeon_device *rdev) cypress_dpm_display_configuration_changed() argument
2018 cypress_program_display_gap(rdev); cypress_dpm_display_configuration_changed()
2021 int cypress_dpm_init(struct radeon_device *rdev) cypress_dpm_init() argument
2031 rdev->pm.dpm.priv = eg_pi; cypress_dpm_init()
2034 rv770_get_max_vddc(rdev); cypress_dpm_init()
2042 ret = r600_get_platform_caps(rdev); cypress_dpm_init()
2046 ret = rv7xx_parse_power_table(rdev); cypress_dpm_init()
2050 if (rdev->pm.dpm.voltage_response_time == 0) cypress_dpm_init()
2051 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT; cypress_dpm_init()
2052 if (rdev->pm.dpm.backbias_response_time == 0) cypress_dpm_init()
2053 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT; cypress_dpm_init()
2055 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, cypress_dpm_init()
2072 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0); cypress_dpm_init()
2075 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0); cypress_dpm_init()
2078 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0); cypress_dpm_init()
2080 rv770_get_engine_memory_ss(rdev); cypress_dpm_init()
2088 if ((rdev->family == CHIP_CYPRESS) || cypress_dpm_init()
2089 (rdev->family == CHIP_HEMLOCK)) cypress_dpm_init()
2101 if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE) cypress_dpm_init()
2108 if (rdev->flags & RADEON_IS_MOBILITY) cypress_dpm_init()
2122 radeon_acpi_is_pcie_performance_request_supported(rdev); cypress_dpm_init()
2127 if ((rdev->family == CHIP_CYPRESS) || cypress_dpm_init()
2128 (rdev->family == CHIP_HEMLOCK) || cypress_dpm_init()
2129 (rdev->family == CHIP_JUNIPER)) cypress_dpm_init()
2142 void cypress_dpm_fini(struct radeon_device *rdev) cypress_dpm_fini() argument
2146 for (i = 0; i < rdev->pm.dpm.num_ps; i++) { cypress_dpm_fini()
2147 kfree(rdev->pm.dpm.ps[i].ps_priv); cypress_dpm_fini()
2149 kfree(rdev->pm.dpm.ps); cypress_dpm_fini()
2150 kfree(rdev->pm.dpm.priv); cypress_dpm_fini()
2153 bool cypress_dpm_vblank_too_short(struct radeon_device *rdev) cypress_dpm_vblank_too_short() argument
2155 struct rv7xx_power_info *pi = rv770_get_pi(rdev); cypress_dpm_vblank_too_short()
2156 u32 vblank_time = r600_dpm_get_vblank_time(rdev); cypress_dpm_vblank_too_short()
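Several of the cypress_dpm.c hits above share one idiom: a status register is polled once per microsecond for at most rdev->usec_timeout iterations (the loops in cypress_force_mc_use_s0()/s1(), and again in r300_mc_wait_for_idle() below), and the caller bails out if the condition never becomes true. A minimal user-space sketch of that bounded-poll shape, where read_reg() and IDLE_BIT are invented stand-ins for the driver's RREG32() accessor and real status bit:

/* Bounded-poll sketch; read_reg() and IDLE_BIT are hypothetical. */
#include <stdio.h>
#include <unistd.h>

#define IDLE_BIT (1u << 0)

static unsigned int read_reg(void)      /* fake MMIO: goes idle after 3 reads */
{
        static int calls;
        return (++calls >= 3) ? IDLE_BIT : 0;
}

static int wait_for_idle(unsigned int usec_timeout)
{
        unsigned int i;

        for (i = 0; i < usec_timeout; i++) {
                if (read_reg() & IDLE_BIT)
                        return 0;       /* became idle in time */
                usleep(1);              /* the driver uses udelay(1) here */
        }
        return -1;                      /* timed out; caller unwinds */
}

int main(void)
{
        printf("wait_for_idle: %d\n", wait_for_idle(100));
        return 0;
}

The fixed iteration bound is what keeps the MC/SMC wait paths from hanging the driver when the hardware never reports idle.
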
H A Dr300.c55 uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg) rv370_pcie_rreg() argument
60 spin_lock_irqsave(&rdev->pcie_idx_lock, flags); rv370_pcie_rreg()
61 WREG32(RADEON_PCIE_INDEX, ((reg) & rdev->pcie_reg_mask)); rv370_pcie_rreg()
63 spin_unlock_irqrestore(&rdev->pcie_idx_lock, flags); rv370_pcie_rreg()
67 void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) rv370_pcie_wreg() argument
71 spin_lock_irqsave(&rdev->pcie_idx_lock, flags); rv370_pcie_wreg()
72 WREG32(RADEON_PCIE_INDEX, ((reg) & rdev->pcie_reg_mask)); rv370_pcie_wreg()
74 spin_unlock_irqrestore(&rdev->pcie_idx_lock, flags); rv370_pcie_wreg()
80 static int rv370_debugfs_pcie_gart_info_init(struct radeon_device *rdev);
82 void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev) rv370_pcie_gart_tlb_flush() argument
114 void rv370_pcie_gart_set_page(struct radeon_device *rdev, unsigned i, rv370_pcie_gart_set_page() argument
117 void __iomem *ptr = rdev->gart.ptr; rv370_pcie_gart_set_page()
125 int rv370_pcie_gart_init(struct radeon_device *rdev) rv370_pcie_gart_init() argument
129 if (rdev->gart.robj) { rv370_pcie_gart_init()
134 r = radeon_gart_init(rdev); rv370_pcie_gart_init()
137 r = rv370_debugfs_pcie_gart_info_init(rdev); rv370_pcie_gart_init()
140 rdev->gart.table_size = rdev->gart.num_gpu_pages * 4; rv370_pcie_gart_init()
141 rdev->asic->gart.tlb_flush = &rv370_pcie_gart_tlb_flush; rv370_pcie_gart_init()
142 rdev->asic->gart.get_page_entry = &rv370_pcie_gart_get_page_entry; rv370_pcie_gart_init()
143 rdev->asic->gart.set_page = &rv370_pcie_gart_set_page; rv370_pcie_gart_init()
144 return radeon_gart_table_vram_alloc(rdev); rv370_pcie_gart_init()
147 int rv370_pcie_gart_enable(struct radeon_device *rdev) rv370_pcie_gart_enable() argument
153 if (rdev->gart.robj == NULL) { rv370_pcie_gart_enable()
154 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n"); rv370_pcie_gart_enable()
157 r = radeon_gart_table_vram_pin(rdev); rv370_pcie_gart_enable()
163 WREG32_PCIE(RADEON_PCIE_TX_GART_START_LO, rdev->mc.gtt_start); rv370_pcie_gart_enable()
164 tmp = rdev->mc.gtt_end & ~RADEON_GPU_PAGE_MASK; rv370_pcie_gart_enable()
168 table_addr = rdev->gart.table_addr; rv370_pcie_gart_enable()
171 WREG32_PCIE(RADEON_PCIE_TX_DISCARD_RD_ADDR_LO, rdev->mc.vram_start); rv370_pcie_gart_enable()
179 rv370_pcie_gart_tlb_flush(rdev); rv370_pcie_gart_enable()
181 (unsigned)(rdev->mc.gtt_size >> 20), rv370_pcie_gart_enable()
183 rdev->gart.ready = true; rv370_pcie_gart_enable()
187 void rv370_pcie_gart_disable(struct radeon_device *rdev) rv370_pcie_gart_disable() argument
198 radeon_gart_table_vram_unpin(rdev); rv370_pcie_gart_disable()
201 void rv370_pcie_gart_fini(struct radeon_device *rdev) rv370_pcie_gart_fini() argument
203 radeon_gart_fini(rdev); rv370_pcie_gart_fini()
204 rv370_pcie_gart_disable(rdev); rv370_pcie_gart_fini()
205 radeon_gart_table_vram_free(rdev); rv370_pcie_gart_fini()
208 void r300_fence_ring_emit(struct radeon_device *rdev, r300_fence_ring_emit() argument
211 struct radeon_ring *ring = &rdev->ring[fence->ring]; r300_fence_ring_emit()
231 radeon_ring_write(ring, rdev->config.r300.hdp_cntl | r300_fence_ring_emit()
234 radeon_ring_write(ring, rdev->config.r300.hdp_cntl); r300_fence_ring_emit()
236 radeon_ring_write(ring, PACKET0(rdev->fence_drv[fence->ring].scratch_reg, 0)); r300_fence_ring_emit()
242 void r300_ring_start(struct radeon_device *rdev, struct radeon_ring *ring) r300_ring_start() argument
249 switch(rdev->num_gb_pipes) { r300_ring_start()
265 r = radeon_ring_lock(rdev, ring, 64); r300_ring_start()
329 radeon_ring_unlock_commit(rdev, ring, false); r300_ring_start()
332 static void r300_errata(struct radeon_device *rdev) r300_errata() argument
334 rdev->pll_errata = 0; r300_errata()
336 if (rdev->family == CHIP_R300 && r300_errata()
338 rdev->pll_errata |= CHIP_ERRATA_R300_CG; r300_errata()
342 int r300_mc_wait_for_idle(struct radeon_device *rdev) r300_mc_wait_for_idle() argument
347 for (i = 0; i < rdev->usec_timeout; i++) { r300_mc_wait_for_idle()
358 static void r300_gpu_init(struct radeon_device *rdev) r300_gpu_init() argument
362 if ((rdev->family == CHIP_R300 && rdev->pdev->device != 0x4144) || r300_gpu_init()
363 (rdev->family == CHIP_R350 && rdev->pdev->device != 0x4148)) { r300_gpu_init()
365 rdev->num_gb_pipes = 2; r300_gpu_init()
368 rdev->num_gb_pipes = 1; r300_gpu_init()
370 rdev->num_z_pipes = 1; r300_gpu_init()
372 switch (rdev->num_gb_pipes) { r300_gpu_init()
389 if (r100_gui_wait_for_idle(rdev)) { r300_gpu_init()
401 if (r100_gui_wait_for_idle(rdev)) { r300_gpu_init()
405 if (r300_mc_wait_for_idle(rdev)) { r300_gpu_init()
410 rdev->num_gb_pipes, rdev->num_z_pipes); r300_gpu_init()
413 int r300_asic_reset(struct radeon_device *rdev) r300_asic_reset() argument
423 r100_mc_stop(rdev, &save); r300_asic_reset()
425 dev_info(rdev->dev, "(%s:%d) RBBM_STATUS=0x%08X\n", __func__, __LINE__, status); r300_asic_reset()
434 pci_save_state(rdev->pdev); r300_asic_reset()
436 r100_bm_disable(rdev); r300_asic_reset()
444 dev_info(rdev->dev, "(%s:%d) RBBM_STATUS=0x%08X\n", __func__, __LINE__, status); r300_asic_reset()
456 dev_info(rdev->dev, "(%s:%d) RBBM_STATUS=0x%08X\n", __func__, __LINE__, status); r300_asic_reset()
458 pci_restore_state(rdev->pdev); r300_asic_reset()
459 r100_enable_bm(rdev); r300_asic_reset()
462 dev_err(rdev->dev, "failed to reset GPU\n"); r300_asic_reset()
465 dev_info(rdev->dev, "GPU reset succeeded\n"); r300_asic_reset()
466 r100_mc_resume(rdev, &save); r300_asic_reset()
473 void r300_mc_init(struct radeon_device *rdev) r300_mc_init() argument
479 rdev->mc.vram_is_ddr = true; r300_mc_init()
483 case 0: rdev->mc.vram_width = 64; break; r300_mc_init()
484 case 1: rdev->mc.vram_width = 128; break; r300_mc_init()
485 case 2: rdev->mc.vram_width = 256; break; r300_mc_init()
486 default: rdev->mc.vram_width = 128; break; r300_mc_init()
488 r100_vram_init_sizes(rdev); r300_mc_init()
489 base = rdev->mc.aper_base; r300_mc_init()
490 if (rdev->flags & RADEON_IS_IGP) r300_mc_init()
492 radeon_vram_location(rdev, &rdev->mc, base); r300_mc_init()
493 rdev->mc.gtt_base_align = 0; r300_mc_init()
494 if (!(rdev->flags & RADEON_IS_AGP)) r300_mc_init()
495 radeon_gtt_location(rdev, &rdev->mc); r300_mc_init()
496 radeon_update_bandwidth_info(rdev); r300_mc_init()
499 void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes) rv370_set_pcie_lanes() argument
503 if (rdev->flags & RADEON_IS_IGP) rv370_set_pcie_lanes()
506 if (!(rdev->flags & RADEON_IS_PCIE)) rv370_set_pcie_lanes()
558 int rv370_get_pcie_lanes(struct radeon_device *rdev) rv370_get_pcie_lanes() argument
562 if (rdev->flags & RADEON_IS_IGP) rv370_get_pcie_lanes()
565 if (!(rdev->flags & RADEON_IS_PCIE)) rv370_get_pcie_lanes()
594 struct radeon_device *rdev = dev->dev_private; rv370_debugfs_pcie_gart_info() local
619 static int rv370_debugfs_pcie_gart_info_init(struct radeon_device *rdev) rv370_debugfs_pcie_gart_info_init() argument
622 return radeon_debugfs_add_files(rdev, rv370_pcie_gart_info_list, 1); rv370_debugfs_pcie_gart_info_init()
749 if (p->rdev->family < CHIP_RV515) r300_packet0_check()
756 if (p->rdev->family < CHIP_RV515) { r300_packet0_check()
765 p->rdev->cmask_filp != p->filp) { r300_packet0_check()
815 if (p->rdev->family < CHIP_RV515) { r300_packet0_check()
965 if (p->rdev->family < CHIP_R420) { r300_packet0_check()
1032 if (p->rdev->family >= CHIP_RV515) { r300_packet0_check()
1099 if (p->rdev->hyperz_filp != p->filp) { r300_packet0_check()
1109 if (p->rdev->hyperz_filp != p->filp) { r300_packet0_check()
1147 if (idx_value && (p->rdev->hyperz_filp != p->filp)) r300_packet0_check()
1151 if (idx_value && (p->rdev->hyperz_filp != p->filp)) r300_packet0_check()
1154 if (p->rdev->family >= CHIP_RV350) r300_packet0_check()
1160 if (p->rdev->family == CHIP_RV530) r300_packet0_check()
1215 r = r100_cs_track_check(p->rdev, track); r300_packet3_check()
1230 r = r100_cs_track_check(p->rdev, track); r300_packet3_check()
1237 r = r100_cs_track_check(p->rdev, track); r300_packet3_check()
1244 r = r100_cs_track_check(p->rdev, track); r300_packet3_check()
1251 r = r100_cs_track_check(p->rdev, track); r300_packet3_check()
1258 r = r100_cs_track_check(p->rdev, track); r300_packet3_check()
1265 if (p->rdev->hyperz_filp != p->filp) r300_packet3_check()
1269 if (p->rdev->cmask_filp != p->filp) r300_packet3_check()
1290 r100_cs_track_clear(p->rdev, track); r300_cs_parse()
1301 p->rdev->config.r300.reg_safe_bm, r300_cs_parse()
1302 p->rdev->config.r300.reg_safe_bm_size, r300_cs_parse()
1321 void r300_set_reg_safe(struct radeon_device *rdev) r300_set_reg_safe() argument
1323 rdev->config.r300.reg_safe_bm = r300_reg_safe_bm; r300_set_reg_safe()
1324 rdev->config.r300.reg_safe_bm_size = ARRAY_SIZE(r300_reg_safe_bm); r300_set_reg_safe()
1327 void r300_mc_program(struct radeon_device *rdev) r300_mc_program() argument
1332 r = r100_debugfs_mc_info_init(rdev); r300_mc_program()
1334 dev_err(rdev->dev, "Failed to create r100_mc debugfs file.\n"); r300_mc_program()
1338 r100_mc_stop(rdev, &save); r300_mc_program()
1339 if (rdev->flags & RADEON_IS_AGP) { r300_mc_program()
1341 S_00014C_MC_AGP_START(rdev->mc.gtt_start >> 16) | r300_mc_program()
1342 S_00014C_MC_AGP_TOP(rdev->mc.gtt_end >> 16)); r300_mc_program()
1343 WREG32(R_000170_AGP_BASE, lower_32_bits(rdev->mc.agp_base)); r300_mc_program()
1345 upper_32_bits(rdev->mc.agp_base) & 0xff); r300_mc_program()
1352 if (r300_mc_wait_for_idle(rdev)) r300_mc_program()
1356 S_000148_MC_FB_START(rdev->mc.vram_start >> 16) | r300_mc_program()
1357 S_000148_MC_FB_TOP(rdev->mc.vram_end >> 16)); r300_mc_program()
1358 r100_mc_resume(rdev, &save); r300_mc_program()
1361 void r300_clock_startup(struct radeon_device *rdev) r300_clock_startup() argument
1366 radeon_legacy_set_clock_gating(rdev, 1); r300_clock_startup()
1370 if ((rdev->family == CHIP_RV350) || (rdev->family == CHIP_RV380)) r300_clock_startup()
1375 static int r300_startup(struct radeon_device *rdev) r300_startup() argument
1380 r100_set_common_regs(rdev); r300_startup()
1382 r300_mc_program(rdev); r300_startup()
1384 r300_clock_startup(rdev); r300_startup()
1386 r300_gpu_init(rdev); r300_startup()
1389 if (rdev->flags & RADEON_IS_PCIE) { r300_startup()
1390 r = rv370_pcie_gart_enable(rdev); r300_startup()
1395 if (rdev->family == CHIP_R300 || r300_startup()
1396 rdev->family == CHIP_R350 || r300_startup()
1397 rdev->family == CHIP_RV350) r300_startup()
1398 r100_enable_bm(rdev); r300_startup()
1400 if (rdev->flags & RADEON_IS_PCI) { r300_startup()
1401 r = r100_pci_gart_enable(rdev); r300_startup()
1407 r = radeon_wb_init(rdev); r300_startup()
1411 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); r300_startup()
1413 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); r300_startup()
1418 if (!rdev->irq.installed) { r300_startup()
1419 r = radeon_irq_kms_init(rdev); r300_startup()
1424 r100_irq_set(rdev); r300_startup()
1425 rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL); r300_startup()
1427 r = r100_cp_init(rdev, 1024 * 1024); r300_startup()
1429 dev_err(rdev->dev, "failed initializing CP (%d).\n", r); r300_startup()
1433 r = radeon_ib_pool_init(rdev); r300_startup()
1435 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); r300_startup()
1442 int r300_resume(struct radeon_device *rdev) r300_resume() argument
1447 if (rdev->flags & RADEON_IS_PCIE) r300_resume()
1448 rv370_pcie_gart_disable(rdev); r300_resume()
1449 if (rdev->flags & RADEON_IS_PCI) r300_resume()
1450 r100_pci_gart_disable(rdev); r300_resume()
1452 r300_clock_startup(rdev); r300_resume()
1454 if (radeon_asic_reset(rdev)) { r300_resume()
1455 dev_warn(rdev->dev, "GPU reset failed! (0xE40=0x%08X, 0x7C0=0x%08X)\n", r300_resume()
1460 radeon_combios_asic_init(rdev->ddev); r300_resume()
1462 r300_clock_startup(rdev); r300_resume()
1464 radeon_surface_init(rdev); r300_resume()
1466 rdev->accel_working = true; r300_resume()
1467 r = r300_startup(rdev); r300_resume()
1469 rdev->accel_working = false; r300_resume()
1474 int r300_suspend(struct radeon_device *rdev) r300_suspend() argument
1476 radeon_pm_suspend(rdev); r300_suspend()
1477 r100_cp_disable(rdev); r300_suspend()
1478 radeon_wb_disable(rdev); r300_suspend()
1479 r100_irq_disable(rdev); r300_suspend()
1480 if (rdev->flags & RADEON_IS_PCIE) r300_suspend()
1481 rv370_pcie_gart_disable(rdev); r300_suspend()
1482 if (rdev->flags & RADEON_IS_PCI) r300_suspend()
1483 r100_pci_gart_disable(rdev); r300_suspend()
1487 void r300_fini(struct radeon_device *rdev) r300_fini() argument
1489 radeon_pm_fini(rdev); r300_fini()
1490 r100_cp_fini(rdev); r300_fini()
1491 radeon_wb_fini(rdev); r300_fini()
1492 radeon_ib_pool_fini(rdev); r300_fini()
1493 radeon_gem_fini(rdev); r300_fini()
1494 if (rdev->flags & RADEON_IS_PCIE) r300_fini()
1495 rv370_pcie_gart_fini(rdev); r300_fini()
1496 if (rdev->flags & RADEON_IS_PCI) r300_fini()
1497 r100_pci_gart_fini(rdev); r300_fini()
1498 radeon_agp_fini(rdev); r300_fini()
1499 radeon_irq_kms_fini(rdev); r300_fini()
1500 radeon_fence_driver_fini(rdev); r300_fini()
1501 radeon_bo_fini(rdev); r300_fini()
1502 radeon_atombios_fini(rdev); r300_fini()
1503 kfree(rdev->bios); r300_fini()
1504 rdev->bios = NULL; r300_fini()
1507 int r300_init(struct radeon_device *rdev) r300_init() argument
1512 r100_vga_render_disable(rdev); r300_init()
1514 radeon_scratch_init(rdev); r300_init()
1516 radeon_surface_init(rdev); r300_init()
1519 r100_restore_sanity(rdev); r300_init()
1521 if (!radeon_get_bios(rdev)) { r300_init()
1522 if (ASIC_IS_AVIVO(rdev)) r300_init()
1525 if (rdev->is_atom_bios) { r300_init()
1526 dev_err(rdev->dev, "Expecting combios for RS400/RS480 GPU\n"); r300_init()
1529 r = radeon_combios_init(rdev); r300_init()
1534 if (radeon_asic_reset(rdev)) { r300_init()
1535 dev_warn(rdev->dev, r300_init()
1541 if (radeon_boot_test_post_card(rdev) == false) r300_init()
1544 r300_errata(rdev); r300_init()
1546 radeon_get_clock_info(rdev->ddev); r300_init()
1548 if (rdev->flags & RADEON_IS_AGP) { r300_init()
1549 r = radeon_agp_init(rdev); r300_init()
1551 radeon_agp_disable(rdev); r300_init()
1555 r300_mc_init(rdev); r300_init()
1557 r = radeon_fence_driver_init(rdev); r300_init()
1561 r = radeon_bo_init(rdev); r300_init()
1564 if (rdev->flags & RADEON_IS_PCIE) { r300_init()
1565 r = rv370_pcie_gart_init(rdev); r300_init()
1569 if (rdev->flags & RADEON_IS_PCI) { r300_init()
1570 r = r100_pci_gart_init(rdev); r300_init()
1574 r300_set_reg_safe(rdev); r300_init()
1577 radeon_pm_init(rdev); r300_init()
1579 rdev->accel_working = true; r300_init()
1580 r = r300_startup(rdev); r300_init()
1583 dev_err(rdev->dev, "Disabling GPU acceleration\n"); r300_init()
1584 r100_cp_fini(rdev); r300_init()
1585 radeon_wb_fini(rdev); r300_init()
1586 radeon_ib_pool_fini(rdev); r300_init()
1587 radeon_irq_kms_fini(rdev); r300_init()
1588 if (rdev->flags & RADEON_IS_PCIE) r300_init()
1589 rv370_pcie_gart_fini(rdev); r300_init()
1590 if (rdev->flags & RADEON_IS_PCI) r300_init()
1591 r100_pci_gart_fini(rdev); r300_init()
1592 radeon_agp_fini(rdev); r300_init()
1593 rdev->accel_working = false; r300_init()
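rv370_pcie_rreg()/rv370_pcie_wreg() at the top of the r300.c results show the classic index/data register-bank idiom: the PCIE register offset is first written to RADEON_PCIE_INDEX, the value is then moved through a data register, and a spinlock (pcie_idx_lock, taken with irqsave) serialises the two-step sequence so concurrent accessors cannot interleave. A rough user-space model of why the lock is needed, using a pthread mutex and a plain array in place of MMIO (bank[], index_reg and the 0xff mask are invented for the sketch):

/* Index/data register-bank sketch; only the locking shape mirrors
 * rv370_pcie_rreg()/rv370_pcie_wreg(), the storage is invented. */
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t bank[256];              /* the "hidden" register bank */
static uint32_t index_reg;              /* models RADEON_PCIE_INDEX */
static pthread_mutex_t idx_lock = PTHREAD_MUTEX_INITIALIZER;

static uint32_t pcie_rreg(uint32_t reg)
{
        uint32_t v;

        pthread_mutex_lock(&idx_lock);  /* kernel: spin_lock_irqsave() */
        index_reg = reg & 0xff;         /* kernel masks with pcie_reg_mask */
        v = bank[index_reg];            /* read back via the data register */
        pthread_mutex_unlock(&idx_lock);
        return v;
}

static void pcie_wreg(uint32_t reg, uint32_t v)
{
        pthread_mutex_lock(&idx_lock);
        index_reg = reg & 0xff;
        bank[index_reg] = v;            /* write via the data register */
        pthread_mutex_unlock(&idx_lock);
}

int main(void)
{
        pcie_wreg(0x11, 0xdeadbeef);
        printf("0x%08x\n", pcie_rreg(0x11));
        return 0;
}

Without the lock, one thread could overwrite index_reg between another thread's index write and data access, returning the wrong register's contents.
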
H A Dr600_dpm.h133 void r600_dpm_print_ps_status(struct radeon_device *rdev,
135 u32 r600_dpm_get_vblank_time(struct radeon_device *rdev);
136 u32 r600_dpm_get_vrefresh(struct radeon_device *rdev);
141 void r600_gfx_clockgating_enable(struct radeon_device *rdev, bool enable);
142 void r600_dynamicpm_enable(struct radeon_device *rdev, bool enable);
143 void r600_enable_thermal_protection(struct radeon_device *rdev, bool enable);
144 void r600_enable_acpi_pm(struct radeon_device *rdev);
145 void r600_enable_dynamic_pcie_gen2(struct radeon_device *rdev, bool enable);
146 bool r600_dynamicpm_enabled(struct radeon_device *rdev);
147 void r600_enable_sclk_control(struct radeon_device *rdev, bool enable);
148 void r600_enable_mclk_control(struct radeon_device *rdev, bool enable);
149 void r600_enable_spll_bypass(struct radeon_device *rdev, bool enable);
150 void r600_wait_for_spll_change(struct radeon_device *rdev);
151 void r600_set_bsp(struct radeon_device *rdev, u32 u, u32 p);
152 void r600_set_at(struct radeon_device *rdev,
155 void r600_set_tc(struct radeon_device *rdev, u32 index, u32 u_t, u32 d_t);
156 void r600_select_td(struct radeon_device *rdev, enum r600_td td);
157 void r600_set_vrc(struct radeon_device *rdev, u32 vrv);
158 void r600_set_tpu(struct radeon_device *rdev, u32 u);
159 void r600_set_tpc(struct radeon_device *rdev, u32 c);
160 void r600_set_sstu(struct radeon_device *rdev, u32 u);
161 void r600_set_sst(struct radeon_device *rdev, u32 t);
162 void r600_set_git(struct radeon_device *rdev, u32 t);
163 void r600_set_fctu(struct radeon_device *rdev, u32 u);
164 void r600_set_fct(struct radeon_device *rdev, u32 t);
165 void r600_set_ctxcgtt3d_rphc(struct radeon_device *rdev, u32 p);
166 void r600_set_ctxcgtt3d_rsdc(struct radeon_device *rdev, u32 s);
167 void r600_set_vddc3d_oorsu(struct radeon_device *rdev, u32 u);
168 void r600_set_vddc3d_oorphc(struct radeon_device *rdev, u32 p);
169 void r600_set_vddc3d_oorsdc(struct radeon_device *rdev, u32 s);
170 void r600_set_mpll_lock_time(struct radeon_device *rdev, u32 lock_time);
171 void r600_set_mpll_reset_time(struct radeon_device *rdev, u32 reset_time);
172 void r600_engine_clock_entry_enable(struct radeon_device *rdev,
174 void r600_engine_clock_entry_enable_pulse_skipping(struct radeon_device *rdev,
176 void r600_engine_clock_entry_enable_post_divider(struct radeon_device *rdev,
178 void r600_engine_clock_entry_set_post_divider(struct radeon_device *rdev,
180 void r600_engine_clock_entry_set_reference_divider(struct radeon_device *rdev,
182 void r600_engine_clock_entry_set_feedback_divider(struct radeon_device *rdev,
184 void r600_engine_clock_entry_set_step_time(struct radeon_device *rdev,
186 void r600_vid_rt_set_ssu(struct radeon_device *rdev, u32 u);
187 void r600_vid_rt_set_vru(struct radeon_device *rdev, u32 u);
188 void r600_vid_rt_set_vrt(struct radeon_device *rdev, u32 rt);
189 void r600_voltage_control_enable_pins(struct radeon_device *rdev,
191 void r600_voltage_control_program_voltages(struct radeon_device *rdev,
193 void r600_voltage_control_deactivate_static_control(struct radeon_device *rdev,
195 void r600_power_level_enable(struct radeon_device *rdev,
197 void r600_power_level_set_voltage_index(struct radeon_device *rdev,
199 void r600_power_level_set_mem_clock_index(struct radeon_device *rdev,
201 void r600_power_level_set_eng_clock_index(struct radeon_device *rdev,
203 void r600_power_level_set_watermark_id(struct radeon_device *rdev,
206 void r600_power_level_set_pcie_gen2(struct radeon_device *rdev,
208 enum r600_power_level r600_power_level_get_current_index(struct radeon_device *rdev);
209 enum r600_power_level r600_power_level_get_target_index(struct radeon_device *rdev);
210 void r600_power_level_set_enter_index(struct radeon_device *rdev,
212 void r600_wait_for_power_level_unequal(struct radeon_device *rdev,
214 void r600_wait_for_power_level(struct radeon_device *rdev,
216 void r600_start_dpm(struct radeon_device *rdev);
217 void r600_stop_dpm(struct radeon_device *rdev);
221 int r600_get_platform_caps(struct radeon_device *rdev);
223 int r600_parse_extended_power_table(struct radeon_device *rdev);
224 void r600_free_extended_power_table(struct radeon_device *rdev);
226 enum radeon_pcie_gen r600_get_pcie_gen_support(struct radeon_device *rdev,
231 u16 r600_get_pcie_lane_support(struct radeon_device *rdev,
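Most of the r600_dpm.h declarations above take a (rdev, bool enable) pair; in the corresponding .c files such helpers typically reduce to a read-modify-write that sets or clears a single control bit. A schematic sketch of that shape, assuming the declarations follow the usual pattern (CTRL_REG, DYN_PM_EN and the regs[] backing store are made up; the real helpers use RREG32()/WREG32() on hardware registers):

/* enable/disable toggle sketch; register name and bit are hypothetical. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define CTRL_REG  0x10
#define DYN_PM_EN (1u << 2)

static uint32_t regs[64];               /* stand-in for MMIO space */

static uint32_t rreg32(uint32_t r) { return regs[r]; }
static void wreg32(uint32_t r, uint32_t v) { regs[r] = v; }

static void dynamicpm_enable(bool enable)
{
        uint32_t tmp = rreg32(CTRL_REG);

        if (enable)
                tmp |= DYN_PM_EN;       /* turn the feature on */
        else
                tmp &= ~DYN_PM_EN;      /* off, leaving other bits intact */
        wreg32(CTRL_REG, tmp);
}

int main(void)
{
        dynamicpm_enable(true);
        printf("on:  0x%08x\n", rreg32(CTRL_REG));
        dynamicpm_enable(false);
        printf("off: 0x%08x\n", rreg32(CTRL_REG));
        return 0;
}

Keeping the enable and disable paths in one helper is what lets the dpm_enable()/dpm_disable() sequences above call the same function symmetrically.
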
H A Drv770_dpm.c55 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev) rv770_get_pi() argument
57 struct rv7xx_power_info *pi = rdev->pm.dpm.priv; rv770_get_pi()
62 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev) evergreen_get_pi() argument
64 struct evergreen_power_info *pi = rdev->pm.dpm.priv; evergreen_get_pi()
69 static void rv770_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev, rv770_enable_bif_dynamic_pcie_gen2() argument
72 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_enable_bif_dynamic_pcie_gen2()
92 static void rv770_enable_l0s(struct radeon_device *rdev) rv770_enable_l0s() argument
101 static void rv770_enable_l1(struct radeon_device *rdev) rv770_enable_l1() argument
113 static void rv770_enable_pll_sleep_in_l1(struct radeon_device *rdev) rv770_enable_pll_sleep_in_l1() argument
130 static void rv770_gfx_clock_gating_enable(struct radeon_device *rdev, rv770_gfx_clock_gating_enable() argument
143 static void rv770_mg_clock_gating_enable(struct radeon_device *rdev, rv770_mg_clock_gating_enable() argument
146 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_mg_clock_gating_enable()
151 if (rdev->family == CHIP_RV770) rv770_mg_clock_gating_enable()
167 void rv770_restore_cgcg(struct radeon_device *rdev) rv770_restore_cgcg() argument
180 static void rv770_start_dpm(struct radeon_device *rdev) rv770_start_dpm() argument
189 void rv770_stop_dpm(struct radeon_device *rdev) rv770_stop_dpm() argument
193 result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_TwoLevelsDisabled); rv770_stop_dpm()
205 bool rv770_dpm_enabled(struct radeon_device *rdev) rv770_dpm_enabled() argument
213 void rv770_enable_thermal_protection(struct radeon_device *rdev, rv770_enable_thermal_protection() argument
222 void rv770_enable_acpi_pm(struct radeon_device *rdev) rv770_enable_acpi_pm() argument
227 u8 rv770_get_seq_value(struct radeon_device *rdev, rv770_get_seq_value() argument
235 int rv770_read_smc_soft_register(struct radeon_device *rdev,
238 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
240 return rv770_read_smc_sram_dword(rdev,
246 int rv770_write_smc_soft_register(struct radeon_device *rdev, rv770_write_smc_soft_register() argument
249 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_write_smc_soft_register()
251 return rv770_write_smc_sram_dword(rdev, rv770_write_smc_soft_register()
256 int rv770_populate_smc_t(struct radeon_device *rdev, rv770_populate_smc_t() argument
261 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_populate_smc_t()
302 int rv770_populate_smc_sp(struct radeon_device *rdev, rv770_populate_smc_sp() argument
306 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_populate_smc_sp()
371 u32 rv770_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf) rv770_map_clkf_to_ibias() argument
386 static int rv770_populate_mclk_value(struct radeon_device *rdev, rv770_populate_mclk_value() argument
390 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_populate_mclk_value()
404 u32 reference_clock = rdev->clock.mpll.reference_freq; rv770_populate_mclk_value()
410 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM, rv770_populate_mclk_value()
426 ibias = rv770_map_clkf_to_ibias(rdev, clkf); rv770_populate_mclk_value()
450 ibias = rv770_map_clkf_to_ibias(rdev, clkf); rv770_populate_mclk_value()
484 static int rv770_populate_sclk_value(struct radeon_device *rdev, rv770_populate_sclk_value() argument
488 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_populate_sclk_value()
501 u32 reference_clock = rdev->clock.spll.reference_freq; rv770_populate_sclk_value()
506 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, rv770_populate_sclk_value()
542 if (radeon_atombios_get_asic_ss_info(rdev, &ss, rv770_populate_sclk_value()
566 int rv770_populate_vddc_value(struct radeon_device *rdev, u16 vddc, rv770_populate_vddc_value() argument
569 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_populate_vddc_value()
592 int rv770_populate_mvdd_value(struct radeon_device *rdev, u32 mclk, rv770_populate_mvdd_value() argument
595 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_populate_mvdd_value()
614 static int rv770_convert_power_level_to_smc(struct radeon_device *rdev, rv770_convert_power_level_to_smc() argument
619 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_convert_power_level_to_smc()
628 if (rdev->family == CHIP_RV740) rv770_convert_power_level_to_smc()
629 ret = rv740_populate_sclk_value(rdev, pl->sclk, rv770_convert_power_level_to_smc()
631 else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) rv770_convert_power_level_to_smc()
632 ret = rv730_populate_sclk_value(rdev, pl->sclk, rv770_convert_power_level_to_smc()
635 ret = rv770_populate_sclk_value(rdev, pl->sclk, rv770_convert_power_level_to_smc()
640 if (rdev->family == CHIP_RV740) { rv770_convert_power_level_to_smc()
653 ret = rv740_populate_mclk_value(rdev, pl->sclk, rv770_convert_power_level_to_smc()
655 } else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) rv770_convert_power_level_to_smc()
656 ret = rv730_populate_mclk_value(rdev, pl->sclk, rv770_convert_power_level_to_smc()
659 ret = rv770_populate_mclk_value(rdev, pl->sclk, rv770_convert_power_level_to_smc()
664 ret = rv770_populate_vddc_value(rdev, pl->vddc, rv770_convert_power_level_to_smc()
669 ret = rv770_populate_mvdd_value(rdev, pl->mclk, &level->mvdd); rv770_convert_power_level_to_smc()
674 static int rv770_convert_power_state_to_smc(struct radeon_device *rdev, rv770_convert_power_state_to_smc() argument
684 ret = rv770_convert_power_level_to_smc(rdev, rv770_convert_power_state_to_smc()
691 ret = rv770_convert_power_level_to_smc(rdev, rv770_convert_power_state_to_smc()
698 ret = rv770_convert_power_level_to_smc(rdev, rv770_convert_power_state_to_smc()
709 smc_state->levels[0].seqValue = rv770_get_seq_value(rdev, rv770_convert_power_state_to_smc()
711 smc_state->levels[1].seqValue = rv770_get_seq_value(rdev, rv770_convert_power_state_to_smc()
713 smc_state->levels[2].seqValue = rv770_get_seq_value(rdev, rv770_convert_power_state_to_smc()
716 rv770_populate_smc_sp(rdev, radeon_state, smc_state); rv770_convert_power_state_to_smc()
718 return rv770_populate_smc_t(rdev, radeon_state, smc_state); rv770_convert_power_state_to_smc()
722 u32 rv770_calculate_memory_refresh_rate(struct radeon_device *rdev, rv770_calculate_memory_refresh_rate() argument
739 static void rv770_program_memory_timing_parameters(struct radeon_device *rdev, rv770_program_memory_timing_parameters() argument
743 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_program_memory_timing_parameters()
753 radeon_atom_set_engine_dram_timings(rdev, high_clock, rv770_program_memory_timing_parameters()
764 POWERMODE0(rv770_calculate_memory_refresh_rate(rdev, pi->boot_sclk)) | rv770_program_memory_timing_parameters()
765 POWERMODE1(rv770_calculate_memory_refresh_rate(rdev, state->low.sclk)) | rv770_program_memory_timing_parameters()
766 POWERMODE2(rv770_calculate_memory_refresh_rate(rdev, state->medium.sclk)) | rv770_program_memory_timing_parameters()
767 POWERMODE3(rv770_calculate_memory_refresh_rate(rdev, state->high.sclk)); rv770_program_memory_timing_parameters()
771 void rv770_enable_backbias(struct radeon_device *rdev, rv770_enable_backbias() argument
780 static void rv770_enable_spread_spectrum(struct radeon_device *rdev, rv770_enable_spread_spectrum() argument
783 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_enable_spread_spectrum()
790 if (rdev->family == CHIP_RV740) rv770_enable_spread_spectrum()
791 rv740_enable_mclk_spread_spectrum(rdev, true); rv770_enable_spread_spectrum()
800 if (rdev->family == CHIP_RV740) rv770_enable_spread_spectrum()
801 rv740_enable_mclk_spread_spectrum(rdev, false); rv770_enable_spread_spectrum()
805 static void rv770_program_mpll_timing_parameters(struct radeon_device *rdev) rv770_program_mpll_timing_parameters() argument
807 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_program_mpll_timing_parameters()
809 if ((rdev->family == CHIP_RV770) && !pi->mem_gddr5) { rv770_program_mpll_timing_parameters()
816 void rv770_setup_bsp(struct radeon_device *rdev) rv770_setup_bsp() argument
818 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_setup_bsp()
819 u32 xclk = radeon_get_xclk(rdev); rv770_setup_bsp()
840 void rv770_program_git(struct radeon_device *rdev) rv770_program_git() argument
845 void rv770_program_tp(struct radeon_device *rdev) rv770_program_tp() argument
863 void rv770_program_tpp(struct radeon_device *rdev) rv770_program_tpp() argument
868 void rv770_program_sstp(struct radeon_device *rdev) rv770_program_sstp() argument
873 void rv770_program_engine_speed_parameters(struct radeon_device *rdev) rv770_program_engine_speed_parameters() argument
878 static void rv770_enable_display_gap(struct radeon_device *rdev) rv770_enable_display_gap() argument
888 void rv770_program_vc(struct radeon_device *rdev) rv770_program_vc() argument
890 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_program_vc()
895 void rv770_clear_vc(struct radeon_device *rdev) rv770_clear_vc() argument
900 int rv770_upload_firmware(struct radeon_device *rdev) rv770_upload_firmware() argument
902 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_upload_firmware()
905 rv770_reset_smc(rdev); rv770_upload_firmware()
906 rv770_stop_smc_clock(rdev); rv770_upload_firmware()
908 ret = rv770_load_smc_ucode(rdev, pi->sram_end); rv770_upload_firmware()
915 static int rv770_populate_smc_acpi_state(struct radeon_device *rdev, rv770_populate_smc_acpi_state() argument
918 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_populate_smc_acpi_state()
942 rv770_populate_vddc_value(rdev, pi->acpi_vddc, rv770_populate_smc_acpi_state()
956 rv770_populate_vddc_value(rdev, pi->min_vddc_in_table, rv770_populate_smc_acpi_state()
998 rv770_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd); rv770_populate_smc_acpi_state()
1006 int rv770_populate_initial_mvdd_value(struct radeon_device *rdev, rv770_populate_initial_mvdd_value() argument
1009 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_populate_initial_mvdd_value()
1023 static int rv770_populate_smc_initial_state(struct radeon_device *rdev, rv770_populate_smc_initial_state() argument
1028 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_populate_smc_initial_state()
1069 rv770_get_seq_value(rdev, &initial_state->low); rv770_populate_smc_initial_state()
1071 rv770_populate_vddc_value(rdev, rv770_populate_smc_initial_state()
1074 rv770_populate_initial_mvdd_value(rdev, rv770_populate_smc_initial_state()
1091 if (rdev->family == CHIP_RV740) { rv770_populate_smc_initial_state()
1114 static int rv770_populate_smc_vddc_table(struct radeon_device *rdev, rv770_populate_smc_vddc_table() argument
1117 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_populate_smc_vddc_table()
1143 static int rv770_populate_smc_mvdd_table(struct radeon_device *rdev, rv770_populate_smc_mvdd_table() argument
1146 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_populate_smc_mvdd_table()
1162 static int rv770_init_smc_table(struct radeon_device *rdev, rv770_init_smc_table() argument
1165 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_init_smc_table()
1174 rv770_populate_smc_vddc_table(rdev, table); rv770_init_smc_table()
1175 rv770_populate_smc_mvdd_table(rdev, table); rv770_init_smc_table()
1177 switch (rdev->pm.int_thermal_type) { rv770_init_smc_table()
1191 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC) { rv770_init_smc_table()
1194 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_DONT_WAIT_FOR_VBLANK_ON_ALERT) rv770_init_smc_table()
1197 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_GOTO_BOOT_ON_ALERT) rv770_init_smc_table()
1201 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC) rv770_init_smc_table()
1207 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) rv770_init_smc_table()
1208 ret = rv730_populate_smc_initial_state(rdev, radeon_boot_state, table); rv770_init_smc_table()
1210 ret = rv770_populate_smc_initial_state(rdev, radeon_boot_state, table); rv770_init_smc_table()
1214 if (rdev->family == CHIP_RV740) rv770_init_smc_table()
1215 ret = rv740_populate_smc_acpi_state(rdev, table); rv770_init_smc_table()
1216 else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) rv770_init_smc_table()
1217 ret = rv730_populate_smc_acpi_state(rdev, table); rv770_init_smc_table()
1219 ret = rv770_populate_smc_acpi_state(rdev, table); rv770_init_smc_table()
1225 return rv770_copy_bytes_to_smc(rdev, rv770_init_smc_table()
1232 static int rv770_construct_vddc_table(struct radeon_device *rdev) rv770_construct_vddc_table() argument
1234 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_construct_vddc_table()
1240 radeon_atom_get_min_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &min); rv770_construct_vddc_table()
1241 radeon_atom_get_max_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &max); rv770_construct_vddc_table()
1242 radeon_atom_get_voltage_step(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &step); rv770_construct_vddc_table()
1253 radeon_atom_get_voltage_gpio_settings(rdev, rv770_construct_vddc_table()
1283 static int rv770_get_mvdd_pin_configuration(struct radeon_device *rdev) rv770_get_mvdd_pin_configuration() argument
1285 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_get_mvdd_pin_configuration()
1288 radeon_atom_get_voltage_gpio_settings(rdev, rv770_get_mvdd_pin_configuration()
1295 radeon_atom_get_voltage_gpio_settings(rdev, rv770_get_mvdd_pin_configuration()
1304 u8 rv770_get_memory_module_index(struct radeon_device *rdev) rv770_get_memory_module_index() argument
1309 static int rv770_get_mvdd_configuration(struct radeon_device *rdev) rv770_get_mvdd_configuration() argument
1311 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_get_mvdd_configuration()
1315 memory_module_index = rv770_get_memory_module_index(rdev); rv770_get_mvdd_configuration()
1317 if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info)) { rv770_get_mvdd_configuration()
1330 return rv770_get_mvdd_pin_configuration(rdev); rv770_get_mvdd_configuration()
1333 void rv770_enable_voltage_control(struct radeon_device *rdev, rv770_enable_voltage_control() argument
1342 static void rv770_program_display_gap(struct radeon_device *rdev) rv770_program_display_gap() argument
1347 if (rdev->pm.dpm.new_active_crtcs & 1) { rv770_program_display_gap()
1350 } else if (rdev->pm.dpm.new_active_crtcs & 2) { rv770_program_display_gap()
1360 static void rv770_enable_dynamic_pcie_gen2(struct radeon_device *rdev, rv770_enable_dynamic_pcie_gen2() argument
1363 rv770_enable_bif_dynamic_pcie_gen2(rdev, enable); rv770_enable_dynamic_pcie_gen2()
1371 static void r7xx_program_memory_timing_parameters(struct radeon_device *rdev, r7xx_program_memory_timing_parameters() argument
1374 if ((rdev->family == CHIP_RV730) || r7xx_program_memory_timing_parameters()
1375 (rdev->family == CHIP_RV710) || r7xx_program_memory_timing_parameters()
1376 (rdev->family == CHIP_RV740)) r7xx_program_memory_timing_parameters()
1377 rv730_program_memory_timing_parameters(rdev, radeon_new_state); r7xx_program_memory_timing_parameters()
1379 rv770_program_memory_timing_parameters(rdev, radeon_new_state); r7xx_program_memory_timing_parameters()
1382 static int rv770_upload_sw_state(struct radeon_device *rdev, rv770_upload_sw_state() argument
1385 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_upload_sw_state()
1391 ret = rv770_convert_power_state_to_smc(rdev, radeon_new_state, &state); rv770_upload_sw_state()
1395 return rv770_copy_bytes_to_smc(rdev, address, (const u8 *)&state, rv770_upload_sw_state()
1400 int rv770_halt_smc(struct radeon_device *rdev) rv770_halt_smc() argument
1402 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Halt) != PPSMC_Result_OK) rv770_halt_smc()
1405 if (rv770_wait_for_smc_inactive(rdev) != PPSMC_Result_OK) rv770_halt_smc()
1411 int rv770_resume_smc(struct radeon_device *rdev) rv770_resume_smc() argument
1413 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Resume) != PPSMC_Result_OK) rv770_resume_smc()
1418 int rv770_set_sw_state(struct radeon_device *rdev) rv770_set_sw_state() argument
1420 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToSwState) != PPSMC_Result_OK) rv770_set_sw_state()
1425 int rv770_set_boot_state(struct radeon_device *rdev) rv770_set_boot_state() argument
1427 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToInitialState) != PPSMC_Result_OK) rv770_set_boot_state()
1432 void rv770_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev, rv770_set_uvd_clock_before_set_eng_clock() argument
1446 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk); rv770_set_uvd_clock_before_set_eng_clock()
1449 void rv770_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev, rv770_set_uvd_clock_after_set_eng_clock() argument
1463 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk); rv770_set_uvd_clock_after_set_eng_clock()
1466 int rv770_restrict_performance_levels_before_switch(struct radeon_device *rdev) rv770_restrict_performance_levels_before_switch() argument
1468 if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_NoForcedLevel)) != PPSMC_Result_OK) rv770_restrict_performance_levels_before_switch()
1471 if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_TwoLevelsDisabled)) != PPSMC_Result_OK) rv770_restrict_performance_levels_before_switch()
1477 int rv770_dpm_force_performance_level(struct radeon_device *rdev, rv770_dpm_force_performance_level() argument
1483 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_ZeroLevelsDisabled) != PPSMC_Result_OK) rv770_dpm_force_performance_level()
1487 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK) rv770_dpm_force_performance_level()
1491 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK) rv770_dpm_force_performance_level()
1496 if (rv770_send_msg_to_smc(rdev, msg) != PPSMC_Result_OK) rv770_dpm_force_performance_level()
1499 rdev->pm.dpm.forced_level = level; rv770_dpm_force_performance_level()
1504 void r7xx_start_smc(struct radeon_device *rdev) r7xx_start_smc() argument
1506 rv770_start_smc(rdev); r7xx_start_smc()
1507 rv770_start_smc_clock(rdev); r7xx_start_smc()
1511 void r7xx_stop_smc(struct radeon_device *rdev) r7xx_stop_smc() argument
1513 rv770_reset_smc(rdev); r7xx_stop_smc()
1514 rv770_stop_smc_clock(rdev); r7xx_stop_smc()
1517 static void rv770_read_clock_registers(struct radeon_device *rdev) rv770_read_clock_registers() argument
1519 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_read_clock_registers()
1544 static void r7xx_read_clock_registers(struct radeon_device *rdev) r7xx_read_clock_registers() argument
1546 if (rdev->family == CHIP_RV740) r7xx_read_clock_registers()
1547 rv740_read_clock_registers(rdev); r7xx_read_clock_registers()
1548 else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) r7xx_read_clock_registers()
1549 rv730_read_clock_registers(rdev); r7xx_read_clock_registers()
1551 rv770_read_clock_registers(rdev); r7xx_read_clock_registers()
1554 void rv770_read_voltage_smio_registers(struct radeon_device *rdev) rv770_read_voltage_smio_registers() argument
1556 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_read_voltage_smio_registers()
1562 void rv770_reset_smio_status(struct radeon_device *rdev) rv770_reset_smio_status() argument
1564 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_reset_smio_status()
1590 void rv770_get_memory_type(struct radeon_device *rdev) rv770_get_memory_type() argument
1592 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_get_memory_type()
1605 void rv770_get_pcie_gen2_status(struct radeon_device *rdev) rv770_get_pcie_gen2_status() argument
1607 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_get_pcie_gen2_status()
1628 static int rv770_enter_ulp_state(struct radeon_device *rdev)
1630 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1647 static int rv770_exit_ulp_state(struct radeon_device *rdev)
1649 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1657 for (i = 0; i < rdev->usec_timeout; i++) {
1670 static void rv770_get_mclk_odt_threshold(struct radeon_device *rdev) rv770_get_mclk_odt_threshold() argument
1672 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_get_mclk_odt_threshold()
1678 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) { rv770_get_mclk_odt_threshold()
1679 memory_module_index = rv770_get_memory_module_index(rdev); rv770_get_mclk_odt_threshold()
1681 if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info)) rv770_get_mclk_odt_threshold()
1690 void rv770_get_max_vddc(struct radeon_device *rdev) rv770_get_max_vddc() argument
1692 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_get_max_vddc()
1695 if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc)) rv770_get_max_vddc()
1701 void rv770_program_response_times(struct radeon_device *rdev) rv770_program_response_times() argument
1708 voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time; rv770_program_response_times()
1709 backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time; rv770_program_response_times()
1720 reference_clock = radeon_get_xclk(rdev); rv770_program_response_times()
1727 rv770_write_smc_soft_register(rdev, rv770_program_response_times()
1729 rv770_write_smc_soft_register(rdev, rv770_program_response_times()
1731 rv770_write_smc_soft_register(rdev, rv770_program_response_times()
1733 rv770_write_smc_soft_register(rdev, rv770_program_response_times()
1738 rv770_write_smc_soft_register(rdev, rv770_program_response_times()
1744 static void rv770_program_dcodt_before_state_switch(struct radeon_device *rdev, rv770_program_dcodt_before_state_switch() argument
1748 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_program_dcodt_before_state_switch()
1769 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) rv770_program_dcodt_before_state_switch()
1770 rv730_program_dcodt(rdev, new_use_dc); rv770_program_dcodt_before_state_switch()
1773 static void rv770_program_dcodt_after_state_switch(struct radeon_device *rdev, rv770_program_dcodt_after_state_switch() argument
1777 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_program_dcodt_after_state_switch()
1798 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) rv770_program_dcodt_after_state_switch()
1799 rv730_program_dcodt(rdev, new_use_dc); rv770_program_dcodt_after_state_switch()
1802 static void rv770_retrieve_odt_values(struct radeon_device *rdev) rv770_retrieve_odt_values() argument
1804 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_retrieve_odt_values()
1809 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) rv770_retrieve_odt_values()
1810 rv730_get_odt_values(rdev); rv770_retrieve_odt_values()
1813 static void rv770_set_dpm_event_sources(struct radeon_device *rdev, u32 sources) rv770_set_dpm_event_sources() argument
1815 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_set_dpm_event_sources()
1850 void rv770_enable_auto_throttle_source(struct radeon_device *rdev, rv770_enable_auto_throttle_source() argument
1854 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_enable_auto_throttle_source()
1859 rv770_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources); rv770_enable_auto_throttle_source()
1864 rv770_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources); rv770_enable_auto_throttle_source()
1869 static int rv770_set_thermal_temperature_range(struct radeon_device *rdev, rv770_set_thermal_temperature_range() argument
1888 rdev->pm.dpm.thermal.min_temp = low_temp; rv770_set_thermal_temperature_range()
1889 rdev->pm.dpm.thermal.max_temp = high_temp; rv770_set_thermal_temperature_range()
1894 int rv770_dpm_enable(struct radeon_device *rdev) rv770_dpm_enable() argument
1896 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_dpm_enable()
1897 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps; rv770_dpm_enable()
1901 rv770_restore_cgcg(rdev); rv770_dpm_enable()
1903 if (rv770_dpm_enabled(rdev)) rv770_dpm_enable()
1907 rv770_enable_voltage_control(rdev, true); rv770_dpm_enable()
1908 ret = rv770_construct_vddc_table(rdev); rv770_dpm_enable()
1916 rv770_retrieve_odt_values(rdev); rv770_dpm_enable()
1919 ret = rv770_get_mvdd_configuration(rdev); rv770_dpm_enable()
1926 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS) rv770_dpm_enable()
1927 rv770_enable_backbias(rdev, true); rv770_dpm_enable()
1929 rv770_enable_spread_spectrum(rdev, true); rv770_dpm_enable()
1932 rv770_enable_thermal_protection(rdev, true); rv770_dpm_enable()
1934 rv770_program_mpll_timing_parameters(rdev); rv770_dpm_enable()
1935 rv770_setup_bsp(rdev); rv770_dpm_enable()
1936 rv770_program_git(rdev); rv770_dpm_enable()
1937 rv770_program_tp(rdev); rv770_dpm_enable()
1938 rv770_program_tpp(rdev); rv770_dpm_enable()
1939 rv770_program_sstp(rdev); rv770_dpm_enable()
1940 rv770_program_engine_speed_parameters(rdev); rv770_dpm_enable()
1941 rv770_enable_display_gap(rdev); rv770_dpm_enable()
1942 rv770_program_vc(rdev); rv770_dpm_enable()
1945 rv770_enable_dynamic_pcie_gen2(rdev, true); rv770_dpm_enable()
1947 ret = rv770_upload_firmware(rdev); rv770_dpm_enable()
1952 ret = rv770_init_smc_table(rdev, boot_ps); rv770_dpm_enable()
1958 rv770_program_response_times(rdev); rv770_dpm_enable()
1959 r7xx_start_smc(rdev); rv770_dpm_enable()
1961 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) rv770_dpm_enable()
1962 rv730_start_dpm(rdev); rv770_dpm_enable()
1964 rv770_start_dpm(rdev); rv770_dpm_enable()
1967 rv770_gfx_clock_gating_enable(rdev, true); rv770_dpm_enable()
1970 rv770_mg_clock_gating_enable(rdev, true); rv770_dpm_enable()
1972 rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true); rv770_dpm_enable()
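
Read top to bottom, the rv770_dpm_enable() hits spell out the bring-up order. A condensed sketch of that sequence using the helpers exactly as listed; error handling, platform-cap checks, and the pi->... feature gates are elided, and the -EINVAL early-out value is an assumption:

    static int dpm_enable_sketch(struct radeon_device *rdev)
    {
            rv770_restore_cgcg(rdev);
            if (rv770_dpm_enabled(rdev))
                    return -EINVAL;                 /* assumed early-out */

            rv770_enable_voltage_control(rdev, true);
            rv770_construct_vddc_table(rdev);
            rv770_retrieve_odt_values(rdev);
            rv770_get_mvdd_configuration(rdev);
            rv770_enable_backbias(rdev, true);      /* if BACKBIAS cap */
            rv770_enable_spread_spectrum(rdev, true);
            rv770_enable_thermal_protection(rdev, true);
            rv770_program_mpll_timing_parameters(rdev);
            rv770_setup_bsp(rdev);
            rv770_program_git(rdev);
            rv770_program_tp(rdev);
            rv770_program_tpp(rdev);
            rv770_program_sstp(rdev);
            rv770_program_engine_speed_parameters(rdev);
            rv770_enable_display_gap(rdev);
            rv770_program_vc(rdev);
            rv770_enable_dynamic_pcie_gen2(rdev, true);

            rv770_upload_firmware(rdev);
            rv770_init_smc_table(rdev, rdev->pm.dpm.boot_ps);
            rv770_program_response_times(rdev);
            r7xx_start_smc(rdev);
            rv770_start_dpm(rdev);  /* rv730_start_dpm() on RV730/RV710 */

            rv770_gfx_clock_gating_enable(rdev, true);
            rv770_mg_clock_gating_enable(rdev, true);
            rv770_enable_auto_throttle_source(rdev,
                            RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
            return 0;
    }

As the rv770_dpm_disable() hits below show, teardown unwinds this in roughly the reverse order, finishing with r7xx_stop_smc() and rv770_reset_smio_status().
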
1977 int rv770_dpm_late_enable(struct radeon_device *rdev) rv770_dpm_late_enable() argument
1981 if (rdev->irq.installed && rv770_dpm_late_enable()
1982 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) { rv770_dpm_late_enable()
1985 ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX); rv770_dpm_late_enable()
1988 rdev->irq.dpm_thermal = true; rv770_dpm_late_enable()
1989 radeon_irq_set(rdev); rv770_dpm_late_enable()
1990 result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt); rv770_dpm_late_enable()
1999 void rv770_dpm_disable(struct radeon_device *rdev) rv770_dpm_disable() argument
2001 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_dpm_disable()
2003 if (!rv770_dpm_enabled(rdev)) rv770_dpm_disable()
2006 rv770_clear_vc(rdev); rv770_dpm_disable()
2009 rv770_enable_thermal_protection(rdev, false); rv770_dpm_disable()
2011 rv770_enable_spread_spectrum(rdev, false); rv770_dpm_disable()
2014 rv770_enable_dynamic_pcie_gen2(rdev, false); rv770_dpm_disable()
2016 if (rdev->irq.installed && rv770_dpm_disable()
2017 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) { rv770_dpm_disable()
2018 rdev->irq.dpm_thermal = false; rv770_dpm_disable()
2019 radeon_irq_set(rdev); rv770_dpm_disable()
2023 rv770_gfx_clock_gating_enable(rdev, false); rv770_dpm_disable()
2026 rv770_mg_clock_gating_enable(rdev, false); rv770_dpm_disable()
2028 if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) rv770_dpm_disable()
2029 rv730_stop_dpm(rdev); rv770_dpm_disable()
2031 rv770_stop_dpm(rdev); rv770_dpm_disable()
2033 r7xx_stop_smc(rdev); rv770_dpm_disable()
2034 rv770_reset_smio_status(rdev); rv770_dpm_disable()
2037 int rv770_dpm_set_power_state(struct radeon_device *rdev) rv770_dpm_set_power_state() argument
2039 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_dpm_set_power_state()
2040 struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps; rv770_dpm_set_power_state()
2041 struct radeon_ps *old_ps = rdev->pm.dpm.current_ps; rv770_dpm_set_power_state()
2044 ret = rv770_restrict_performance_levels_before_switch(rdev); rv770_dpm_set_power_state()
2049 rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps); rv770_dpm_set_power_state()
2050 ret = rv770_halt_smc(rdev); rv770_dpm_set_power_state()
2055 ret = rv770_upload_sw_state(rdev, new_ps); rv770_dpm_set_power_state()
2060 r7xx_program_memory_timing_parameters(rdev, new_ps); rv770_dpm_set_power_state()
2062 rv770_program_dcodt_before_state_switch(rdev, new_ps, old_ps); rv770_dpm_set_power_state()
2063 ret = rv770_resume_smc(rdev); rv770_dpm_set_power_state()
2068 ret = rv770_set_sw_state(rdev); rv770_dpm_set_power_state()
2074 rv770_program_dcodt_after_state_switch(rdev, new_ps, old_ps); rv770_dpm_set_power_state()
2075 rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps); rv770_dpm_set_power_state()
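
The rv770_dpm_set_power_state() hits show the state-switch protocol: the SMC is halted around the software-state upload, and dcodt and UVD clocks are reprogrammed on either side of the switch. A condensed, error-checked sketch (the dcodt calls are gated on a pi flag in the real function):

    static int set_power_state_sketch(struct radeon_device *rdev,
                                      struct radeon_ps *new_ps,
                                      struct radeon_ps *old_ps)
    {
            int ret;

            ret = rv770_restrict_performance_levels_before_switch(rdev);
            if (ret)
                    return ret;
            rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);

            ret = rv770_halt_smc(rdev);             /* quiesce the SMC... */
            if (ret)
                    return ret;
            ret = rv770_upload_sw_state(rdev, new_ps);
            if (ret)
                    return ret;
            r7xx_program_memory_timing_parameters(rdev, new_ps);
            rv770_program_dcodt_before_state_switch(rdev, new_ps, old_ps);
            ret = rv770_resume_smc(rdev);           /* ...and resume it */
            if (ret)
                    return ret;

            ret = rv770_set_sw_state(rdev);
            if (ret)
                    return ret;
            rv770_program_dcodt_after_state_switch(rdev, new_ps, old_ps);
            rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
            return 0;
    }
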
2081 void rv770_dpm_reset_asic(struct radeon_device *rdev)
2083 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2084 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
2086 rv770_restrict_performance_levels_before_switch(rdev);
2088 rv770_program_dcodt_before_state_switch(rdev, boot_ps, boot_ps);
2089 rv770_set_boot_state(rdev);
2091 rv770_program_dcodt_after_state_switch(rdev, boot_ps, boot_ps);
2095 void rv770_dpm_setup_asic(struct radeon_device *rdev) rv770_dpm_setup_asic() argument
2097 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_dpm_setup_asic()
2099 r7xx_read_clock_registers(rdev); rv770_dpm_setup_asic()
2100 rv770_read_voltage_smio_registers(rdev); rv770_dpm_setup_asic()
2101 rv770_get_memory_type(rdev); rv770_dpm_setup_asic()
2103 rv770_get_mclk_odt_threshold(rdev); rv770_dpm_setup_asic()
2104 rv770_get_pcie_gen2_status(rdev); rv770_dpm_setup_asic()
2106 rv770_enable_acpi_pm(rdev); rv770_dpm_setup_asic()
2109 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L0s) rv770_dpm_setup_asic()
2110 rv770_enable_l0s(rdev); rv770_dpm_setup_asic()
2111 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L1) rv770_dpm_setup_asic()
2112 rv770_enable_l1(rdev); rv770_dpm_setup_asic()
2113 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_TURNOFFPLL_ASPML1) rv770_dpm_setup_asic()
2114 rv770_enable_pll_sleep_in_l1(rdev); rv770_dpm_setup_asic()
2118 void rv770_dpm_display_configuration_changed(struct radeon_device *rdev) rv770_dpm_display_configuration_changed() argument
2120 rv770_program_display_gap(rdev); rv770_dpm_display_configuration_changed()
2144 static void rv7xx_parse_pplib_non_clock_info(struct radeon_device *rdev, rv7xx_parse_pplib_non_clock_info() argument
2169 rdev->pm.dpm.boot_ps = rps; rv7xx_parse_pplib_non_clock_info()
2171 rdev->pm.dpm.uvd_ps = rps; rv7xx_parse_pplib_non_clock_info()
2174 static void rv7xx_parse_pplib_clock_info(struct radeon_device *rdev, rv7xx_parse_pplib_clock_info() argument
2178 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv7xx_parse_pplib_clock_info()
2179 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); rv7xx_parse_pplib_clock_info()
2197 if (rdev->family >= CHIP_CEDAR) { rv7xx_parse_pplib_clock_info()
2227 if (rdev->family >= CHIP_CEDAR) rv7xx_parse_pplib_clock_info()
2236 if (rdev->family >= CHIP_BARTS) { rv7xx_parse_pplib_clock_info()
2251 radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd); rv7xx_parse_pplib_clock_info()
2252 pl->mclk = rdev->clock.default_mclk; rv7xx_parse_pplib_clock_info()
2253 pl->sclk = rdev->clock.default_sclk; rv7xx_parse_pplib_clock_info()
2260 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk; rv7xx_parse_pplib_clock_info()
2261 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk; rv7xx_parse_pplib_clock_info()
2262 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc; rv7xx_parse_pplib_clock_info()
2263 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci; rv7xx_parse_pplib_clock_info()
2267 int rv7xx_parse_power_table(struct radeon_device *rdev) rv7xx_parse_power_table() argument
2269 struct radeon_mode_info *mode_info = &rdev->mode_info; rv7xx_parse_power_table()
2285 rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) * rv7xx_parse_power_table()
2287 if (!rdev->pm.dpm.ps) rv7xx_parse_power_table()
2304 kfree(rdev->pm.dpm.ps); rv7xx_parse_power_table()
2307 rdev->pm.dpm.ps[i].ps_priv = ps; rv7xx_parse_power_table()
2308 rv7xx_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], rv7xx_parse_power_table()
2317 rv7xx_parse_pplib_clock_info(rdev, rv7xx_parse_power_table()
2318 &rdev->pm.dpm.ps[i], j, rv7xx_parse_power_table()
2323 rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates; rv7xx_parse_power_table()
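
The rv7xx_parse_power_table() hits show the usual pattern: size the radeon_ps array from pplib.ucNumStates, attach a per-state ps_priv, then parse non-clock and clock info per state. One detail worth noting is the open-coded multiply in the kzalloc() hit; a sketch of the same allocation written with kcalloc(), the form later kernels adopted for such sites:

    static int parse_power_table_alloc(struct radeon_device *rdev, u8 num_states)
    {
            /* kcalloc() checks the count * size multiply for overflow,
             * unlike kzalloc(sizeof(struct radeon_ps) * num_states, ...) */
            rdev->pm.dpm.ps = kcalloc(num_states, sizeof(struct radeon_ps),
                                      GFP_KERNEL);
            if (!rdev->pm.dpm.ps)
                    return -ENOMEM;
            rdev->pm.dpm.num_ps = num_states;
            return 0;
    }
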
2327 void rv770_get_engine_memory_ss(struct radeon_device *rdev) rv770_get_engine_memory_ss() argument
2329 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv770_get_engine_memory_ss()
2332 pi->sclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss, rv770_get_engine_memory_ss()
2334 pi->mclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss, rv770_get_engine_memory_ss()
2343 int rv770_dpm_init(struct radeon_device *rdev) rv770_dpm_init() argument
2352 rdev->pm.dpm.priv = pi; rv770_dpm_init()
2354 rv770_get_max_vddc(rdev); rv770_dpm_init()
2360 ret = r600_get_platform_caps(rdev); rv770_dpm_init()
2364 ret = rv7xx_parse_power_table(rdev); rv770_dpm_init()
2368 if (rdev->pm.dpm.voltage_response_time == 0) rv770_dpm_init()
2369 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT; rv770_dpm_init()
2370 if (rdev->pm.dpm.backbias_response_time == 0) rv770_dpm_init()
2371 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT; rv770_dpm_init()
2373 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, rv770_dpm_init()
2389 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0); rv770_dpm_init()
2392 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0); rv770_dpm_init()
2394 rv770_get_engine_memory_ss(rdev); rv770_dpm_init()
2409 if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE) rv770_dpm_init()
2416 if (rdev->flags & RADEON_IS_MOBILITY) rv770_dpm_init()
2432 void rv770_dpm_print_power_state(struct radeon_device *rdev, rv770_dpm_print_power_state() argument
2441 if (rdev->family >= CHIP_CEDAR) { rv770_dpm_print_power_state()
2462 r600_dpm_print_ps_status(rdev, rps); rv770_dpm_print_power_state()
2465 void rv770_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev, rv770_dpm_debugfs_print_current_performance_level() argument
2468 struct radeon_ps *rps = rdev->pm.dpm.current_ps; rv770_dpm_debugfs_print_current_performance_level()
2485 if (rdev->family >= CHIP_CEDAR) { rv770_dpm_debugfs_print_current_performance_level()
2495 u32 rv770_dpm_get_current_sclk(struct radeon_device *rdev) rv770_dpm_get_current_sclk() argument
2497 struct radeon_ps *rps = rdev->pm.dpm.current_ps; rv770_dpm_get_current_sclk()
2517 u32 rv770_dpm_get_current_mclk(struct radeon_device *rdev) rv770_dpm_get_current_mclk() argument
2519 struct radeon_ps *rps = rdev->pm.dpm.current_ps; rv770_dpm_get_current_mclk()
2539 void rv770_dpm_fini(struct radeon_device *rdev) rv770_dpm_fini() argument
2543 for (i = 0; i < rdev->pm.dpm.num_ps; i++) { rv770_dpm_fini()
2544 kfree(rdev->pm.dpm.ps[i].ps_priv); rv770_dpm_fini()
2546 kfree(rdev->pm.dpm.ps); rv770_dpm_fini()
2547 kfree(rdev->pm.dpm.priv); rv770_dpm_fini()
2550 u32 rv770_dpm_get_sclk(struct radeon_device *rdev, bool low) rv770_dpm_get_sclk() argument
2552 struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps); rv770_dpm_get_sclk()
2560 u32 rv770_dpm_get_mclk(struct radeon_device *rdev, bool low) rv770_dpm_get_mclk() argument
2562 struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps); rv770_dpm_get_mclk()
2570 bool rv770_dpm_vblank_too_short(struct radeon_device *rdev) rv770_dpm_vblank_too_short() argument
2572 u32 vblank_time = r600_dpm_get_vblank_time(rdev); rv770_dpm_vblank_too_short()
2577 if ((rdev->family == CHIP_RV770) && rv770_dpm_vblank_too_short()
2578 !(rdev->flags & RADEON_IS_MOBILITY)) rv770_dpm_vblank_too_short()
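
rv770_dpm_vblank_too_short() compares the measured vblank against the memory-clock switch latency, and the family check above effectively disables mclk switching on desktop RV770 boards. A sketch of the comparison; the 200 us limit is an assumption from reading the function body, not visible in these hits:

    #include <stdbool.h>
    #include <stdint.h>

    static bool vblank_too_short(uint32_t vblank_time_us, bool desktop_rv770)
    {
            /* assumed switch latency; desktop RV770 never switches mclk */
            uint32_t switch_limit_us = desktop_rv770 ? UINT32_MAX : 200;

            return vblank_time_us < switch_limit_us;
    }
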
rv770_dpm.h 179 int rv730_populate_sclk_value(struct radeon_device *rdev,
182 int rv730_populate_mclk_value(struct radeon_device *rdev,
185 void rv730_read_clock_registers(struct radeon_device *rdev);
186 int rv730_populate_smc_acpi_state(struct radeon_device *rdev,
188 int rv730_populate_smc_initial_state(struct radeon_device *rdev,
191 void rv730_program_memory_timing_parameters(struct radeon_device *rdev,
193 void rv730_power_gating_enable(struct radeon_device *rdev,
195 void rv730_start_dpm(struct radeon_device *rdev);
196 void rv730_stop_dpm(struct radeon_device *rdev);
197 void rv730_program_dcodt(struct radeon_device *rdev, bool use_dcodt);
198 void rv730_get_odt_values(struct radeon_device *rdev);
201 int rv740_populate_sclk_value(struct radeon_device *rdev, u32 engine_clock,
203 int rv740_populate_mclk_value(struct radeon_device *rdev,
206 void rv740_read_clock_registers(struct radeon_device *rdev);
207 int rv740_populate_smc_acpi_state(struct radeon_device *rdev,
209 void rv740_enable_mclk_spread_spectrum(struct radeon_device *rdev,
216 u32 rv770_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf);
217 int rv770_populate_vddc_value(struct radeon_device *rdev, u16 vddc,
219 int rv770_populate_mvdd_value(struct radeon_device *rdev, u32 mclk,
221 u8 rv770_get_seq_value(struct radeon_device *rdev,
223 int rv770_populate_initial_mvdd_value(struct radeon_device *rdev,
225 u32 rv770_calculate_memory_refresh_rate(struct radeon_device *rdev,
227 void rv770_program_response_times(struct radeon_device *rdev);
228 int rv770_populate_smc_sp(struct radeon_device *rdev,
231 int rv770_populate_smc_t(struct radeon_device *rdev,
234 void rv770_read_voltage_smio_registers(struct radeon_device *rdev);
235 void rv770_get_memory_type(struct radeon_device *rdev);
236 void r7xx_start_smc(struct radeon_device *rdev);
237 u8 rv770_get_memory_module_index(struct radeon_device *rdev);
238 void rv770_get_max_vddc(struct radeon_device *rdev);
239 void rv770_get_pcie_gen2_status(struct radeon_device *rdev);
240 void rv770_enable_acpi_pm(struct radeon_device *rdev);
241 void rv770_restore_cgcg(struct radeon_device *rdev);
242 bool rv770_dpm_enabled(struct radeon_device *rdev);
243 void rv770_enable_voltage_control(struct radeon_device *rdev,
245 void rv770_enable_backbias(struct radeon_device *rdev,
247 void rv770_enable_thermal_protection(struct radeon_device *rdev,
249 void rv770_enable_auto_throttle_source(struct radeon_device *rdev,
252 void rv770_setup_bsp(struct radeon_device *rdev);
253 void rv770_program_git(struct radeon_device *rdev);
254 void rv770_program_tp(struct radeon_device *rdev);
255 void rv770_program_tpp(struct radeon_device *rdev);
256 void rv770_program_sstp(struct radeon_device *rdev);
257 void rv770_program_engine_speed_parameters(struct radeon_device *rdev);
258 void rv770_program_vc(struct radeon_device *rdev);
259 void rv770_clear_vc(struct radeon_device *rdev);
260 int rv770_upload_firmware(struct radeon_device *rdev);
261 void rv770_stop_dpm(struct radeon_device *rdev);
262 void r7xx_stop_smc(struct radeon_device *rdev);
263 void rv770_reset_smio_status(struct radeon_device *rdev);
264 int rv770_restrict_performance_levels_before_switch(struct radeon_device *rdev);
265 int rv770_dpm_force_performance_level(struct radeon_device *rdev,
267 int rv770_halt_smc(struct radeon_device *rdev);
268 int rv770_resume_smc(struct radeon_device *rdev);
269 int rv770_set_sw_state(struct radeon_device *rdev);
270 int rv770_set_boot_state(struct radeon_device *rdev);
271 int rv7xx_parse_power_table(struct radeon_device *rdev);
272 void rv770_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
275 void rv770_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
278 void rv770_get_engine_memory_ss(struct radeon_device *rdev);
281 int rv770_write_smc_soft_register(struct radeon_device *rdev,
ci_dpm.c 165 extern u8 rv770_get_memory_module_index(struct radeon_device *rdev);
166 extern int ni_copy_and_switch_arb_sets(struct radeon_device *rdev,
170 extern void si_trim_voltage_table_to_fit_state_table(struct radeon_device *rdev,
173 extern void cik_enter_rlc_safe_mode(struct radeon_device *rdev);
174 extern void cik_exit_rlc_safe_mode(struct radeon_device *rdev);
175 extern int ci_mc_load_microcode(struct radeon_device *rdev);
176 extern void cik_update_cg(struct radeon_device *rdev,
179 static int ci_get_std_voltage_value_sidd(struct radeon_device *rdev,
182 static int ci_set_power_limit(struct radeon_device *rdev, u32 n);
183 static int ci_set_overdrive_target_tdp(struct radeon_device *rdev,
185 static int ci_update_uvd_dpm(struct radeon_device *rdev, bool gate);
187 static PPSMC_Result ci_send_msg_to_smc_with_parameter(struct radeon_device *rdev,
190 static void ci_thermal_start_smc_fan_control(struct radeon_device *rdev);
191 static void ci_fan_ctrl_set_default_mode(struct radeon_device *rdev);
193 static struct ci_power_info *ci_get_pi(struct radeon_device *rdev) ci_get_pi() argument
195 struct ci_power_info *pi = rdev->pm.dpm.priv; ci_get_pi()
207 static void ci_initialize_powertune_defaults(struct radeon_device *rdev) ci_initialize_powertune_defaults() argument
209 struct ci_power_info *pi = ci_get_pi(rdev); ci_initialize_powertune_defaults()
211 switch (rdev->pdev->device) { ci_initialize_powertune_defaults()
258 if (rdev->family == CHIP_HAWAII) ci_initialize_powertune_defaults()
272 static int ci_populate_bapm_vddc_vid_sidd(struct radeon_device *rdev) ci_populate_bapm_vddc_vid_sidd() argument
274 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_bapm_vddc_vid_sidd()
280 if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries == NULL) ci_populate_bapm_vddc_vid_sidd()
282 if (rdev->pm.dpm.dyn_state.cac_leakage_table.count > 8) ci_populate_bapm_vddc_vid_sidd()
284 if (rdev->pm.dpm.dyn_state.cac_leakage_table.count != ci_populate_bapm_vddc_vid_sidd()
285 rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.count) ci_populate_bapm_vddc_vid_sidd()
288 for (i = 0; i < rdev->pm.dpm.dyn_state.cac_leakage_table.count; i++) { ci_populate_bapm_vddc_vid_sidd()
289 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_EVV) { ci_populate_bapm_vddc_vid_sidd()
290 lo_vid[i] = ci_convert_to_vid(rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc1); ci_populate_bapm_vddc_vid_sidd()
291 hi_vid[i] = ci_convert_to_vid(rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc2); ci_populate_bapm_vddc_vid_sidd()
292 hi2_vid[i] = ci_convert_to_vid(rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc3); ci_populate_bapm_vddc_vid_sidd()
294 lo_vid[i] = ci_convert_to_vid(rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc); ci_populate_bapm_vddc_vid_sidd()
295 hi_vid[i] = ci_convert_to_vid((u16)rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].leakage); ci_populate_bapm_vddc_vid_sidd()
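
ci_populate_bapm_vddc_vid_sidd() converts each CAC leakage-table voltage into a VID code via ci_convert_to_vid(), whose body is not shown in the hits. A sketch of the SVI2-style encoding it is assumed to implement, VID = (1.55 V - vddc) / 6.25 mV computed in 0.25 mV units:

    #include <stdint.h>

    /* Assumed encoding: 6200 = 1.55 V and 25 = 6.25 mV, both in 0.25 mV
     * units; vddc arrives in mV, hence the *4. Valid for vddc <= 1550 mV. */
    static uint8_t convert_to_vid(uint16_t vddc_mv)
    {
            return (uint8_t)((6200 - vddc_mv * 4) / 25);
    }

Under this encoding, 1000 mV maps to VID 88.
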
301 static int ci_populate_vddc_vid(struct radeon_device *rdev) ci_populate_vddc_vid() argument
303 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_vddc_vid()
316 static int ci_populate_svi_load_line(struct radeon_device *rdev) ci_populate_svi_load_line() argument
318 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_svi_load_line()
329 static int ci_populate_tdc_limit(struct radeon_device *rdev) ci_populate_tdc_limit() argument
331 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_tdc_limit()
335 tdc_limit = rdev->pm.dpm.dyn_state.cac_tdp_table->tdc * 256; ci_populate_tdc_limit()
344 static int ci_populate_dw8(struct radeon_device *rdev) ci_populate_dw8() argument
346 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_dw8()
350 ret = ci_read_smc_sram_dword(rdev, ci_populate_dw8()
364 static int ci_populate_fuzzy_fan(struct radeon_device *rdev) ci_populate_fuzzy_fan() argument
366 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_fuzzy_fan()
368 if ((rdev->pm.dpm.fan.fan_output_sensitivity & (1 << 15)) || ci_populate_fuzzy_fan()
369 (rdev->pm.dpm.fan.fan_output_sensitivity == 0)) ci_populate_fuzzy_fan()
370 rdev->pm.dpm.fan.fan_output_sensitivity = ci_populate_fuzzy_fan()
371 rdev->pm.dpm.fan.default_fan_output_sensitivity; ci_populate_fuzzy_fan()
374 cpu_to_be16(rdev->pm.dpm.fan.fan_output_sensitivity); ci_populate_fuzzy_fan()
379 static int ci_min_max_v_gnbl_pm_lid_from_bapm_vddc(struct radeon_device *rdev) ci_min_max_v_gnbl_pm_lid_from_bapm_vddc() argument
381 struct ci_power_info *pi = ci_get_pi(rdev); ci_min_max_v_gnbl_pm_lid_from_bapm_vddc()
411 static int ci_populate_bapm_vddc_base_leakage_sidd(struct radeon_device *rdev) ci_populate_bapm_vddc_base_leakage_sidd() argument
413 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_bapm_vddc_base_leakage_sidd()
417 rdev->pm.dpm.dyn_state.cac_tdp_table; ci_populate_bapm_vddc_base_leakage_sidd()
428 static int ci_populate_bapm_parameters_in_dpm_table(struct radeon_device *rdev) ci_populate_bapm_parameters_in_dpm_table() argument
430 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_bapm_parameters_in_dpm_table()
434 rdev->pm.dpm.dyn_state.cac_tdp_table; ci_populate_bapm_parameters_in_dpm_table()
435 struct radeon_ppm_table *ppm = rdev->pm.dpm.dyn_state.ppm_table; ci_populate_bapm_parameters_in_dpm_table()
476 static int ci_populate_pm_base(struct radeon_device *rdev) ci_populate_pm_base() argument
478 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_pm_base()
483 ret = ci_read_smc_sram_dword(rdev, ci_populate_pm_base()
489 ret = ci_populate_bapm_vddc_vid_sidd(rdev); ci_populate_pm_base()
492 ret = ci_populate_vddc_vid(rdev); ci_populate_pm_base()
495 ret = ci_populate_svi_load_line(rdev); ci_populate_pm_base()
498 ret = ci_populate_tdc_limit(rdev); ci_populate_pm_base()
501 ret = ci_populate_dw8(rdev); ci_populate_pm_base()
504 ret = ci_populate_fuzzy_fan(rdev); ci_populate_pm_base()
507 ret = ci_min_max_v_gnbl_pm_lid_from_bapm_vddc(rdev); ci_populate_pm_base()
510 ret = ci_populate_bapm_vddc_base_leakage_sidd(rdev); ci_populate_pm_base()
513 ret = ci_copy_bytes_to_smc(rdev, pm_fuse_table_offset, ci_populate_pm_base()
523 static void ci_do_enable_didt(struct radeon_device *rdev, const bool enable) ci_do_enable_didt() argument
525 struct ci_power_info *pi = ci_get_pi(rdev); ci_do_enable_didt()
565 static int ci_program_pt_config_registers(struct radeon_device *rdev, ci_program_pt_config_registers() argument
613 static int ci_enable_didt(struct radeon_device *rdev, bool enable) ci_enable_didt() argument
615 struct ci_power_info *pi = ci_get_pi(rdev); ci_enable_didt()
620 cik_enter_rlc_safe_mode(rdev); ci_enable_didt()
623 ret = ci_program_pt_config_registers(rdev, didt_config_ci); ci_enable_didt()
625 cik_exit_rlc_safe_mode(rdev); ci_enable_didt()
630 ci_do_enable_didt(rdev, enable); ci_enable_didt()
632 cik_exit_rlc_safe_mode(rdev); ci_enable_didt()
638 static int ci_enable_power_containment(struct radeon_device *rdev, bool enable) ci_enable_power_containment() argument
640 struct ci_power_info *pi = ci_get_pi(rdev); ci_enable_power_containment()
648 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_EnableDTE); ci_enable_power_containment()
656 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_TDCLimitEnable); ci_enable_power_containment()
664 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_PkgPwrLimitEnable); ci_enable_power_containment()
669 rdev->pm.dpm.dyn_state.cac_tdp_table; ci_enable_power_containment()
675 ci_set_power_limit(rdev, default_pwr_limit); ci_enable_power_containment()
682 ci_send_msg_to_smc(rdev, PPSMC_MSG_TDCLimitDisable); ci_enable_power_containment()
685 ci_send_msg_to_smc(rdev, PPSMC_MSG_DisableDTE); ci_enable_power_containment()
688 ci_send_msg_to_smc(rdev, PPSMC_MSG_PkgPwrLimitDisable); ci_enable_power_containment()
696 static int ci_enable_smc_cac(struct radeon_device *rdev, bool enable) ci_enable_smc_cac() argument
698 struct ci_power_info *pi = ci_get_pi(rdev); ci_enable_smc_cac()
704 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac); ci_enable_smc_cac()
712 ci_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac); ci_enable_smc_cac()
720 static int ci_enable_thermal_based_sclk_dpm(struct radeon_device *rdev, ci_enable_thermal_based_sclk_dpm() argument
723 struct ci_power_info *pi = ci_get_pi(rdev); ci_enable_thermal_based_sclk_dpm()
728 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_ENABLE_THERMAL_DPM); ci_enable_thermal_based_sclk_dpm()
730 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_DISABLE_THERMAL_DPM); ci_enable_thermal_based_sclk_dpm()
739 static int ci_power_control_set_level(struct radeon_device *rdev) ci_power_control_set_level() argument
741 struct ci_power_info *pi = ci_get_pi(rdev); ci_power_control_set_level()
743 rdev->pm.dpm.dyn_state.cac_tdp_table; ci_power_control_set_level()
751 rdev->pm.dpm.tdp_adjustment : (-1 * rdev->pm.dpm.tdp_adjustment); ci_power_control_set_level()
755 ret = ci_set_overdrive_target_tdp(rdev, (u32)target_tdp); ci_power_control_set_level()
761 void ci_dpm_powergate_uvd(struct radeon_device *rdev, bool gate) ci_dpm_powergate_uvd() argument
763 struct ci_power_info *pi = ci_get_pi(rdev); ci_dpm_powergate_uvd()
770 ci_update_uvd_dpm(rdev, gate); ci_dpm_powergate_uvd()
773 bool ci_dpm_vblank_too_short(struct radeon_device *rdev) ci_dpm_vblank_too_short() argument
775 struct ci_power_info *pi = ci_get_pi(rdev); ci_dpm_vblank_too_short()
776 u32 vblank_time = r600_dpm_get_vblank_time(rdev); ci_dpm_vblank_too_short()
786 static void ci_apply_state_adjust_rules(struct radeon_device *rdev, ci_apply_state_adjust_rules() argument
790 struct ci_power_info *pi = ci_get_pi(rdev); ci_apply_state_adjust_rules()
797 rps->evclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].evclk; ci_apply_state_adjust_rules()
798 rps->ecclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].ecclk; ci_apply_state_adjust_rules()
804 if ((rdev->pm.dpm.new_active_crtc_count > 1) || ci_apply_state_adjust_rules()
805 ci_dpm_vblank_too_short(rdev)) ci_apply_state_adjust_rules()
815 if (rdev->pm.dpm.ac_power) ci_apply_state_adjust_rules()
816 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac; ci_apply_state_adjust_rules()
818 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc; ci_apply_state_adjust_rules()
820 if (rdev->pm.dpm.ac_power == false) { ci_apply_state_adjust_rules()
840 if (sclk < rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].sclk) ci_apply_state_adjust_rules()
841 sclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].sclk; ci_apply_state_adjust_rules()
842 if (mclk < rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].mclk) ci_apply_state_adjust_rules()
843 mclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].mclk; ci_apply_state_adjust_rules()
861 static int ci_thermal_set_temperature_range(struct radeon_device *rdev, ci_thermal_set_temperature_range() argument
891 rdev->pm.dpm.thermal.min_temp = low_temp; ci_thermal_set_temperature_range()
892 rdev->pm.dpm.thermal.max_temp = high_temp; ci_thermal_set_temperature_range()
897 static int ci_thermal_enable_alert(struct radeon_device *rdev, ci_thermal_enable_alert() argument
906 rdev->irq.dpm_thermal = false; ci_thermal_enable_alert()
907 result = ci_send_msg_to_smc(rdev, PPSMC_MSG_Thermal_Cntl_Enable); ci_thermal_enable_alert()
915 rdev->irq.dpm_thermal = true; ci_thermal_enable_alert()
916 result = ci_send_msg_to_smc(rdev, PPSMC_MSG_Thermal_Cntl_Disable); ci_thermal_enable_alert()
926 static void ci_fan_ctrl_set_static_mode(struct radeon_device *rdev, u32 mode) ci_fan_ctrl_set_static_mode() argument
928 struct ci_power_info *pi = ci_get_pi(rdev); ci_fan_ctrl_set_static_mode()
948 static int ci_thermal_setup_fan_table(struct radeon_device *rdev) ci_thermal_setup_fan_table() argument
950 struct ci_power_info *pi = ci_get_pi(rdev); ci_thermal_setup_fan_table()
960 rdev->pm.dpm.fan.ucode_fan_control = false; ci_thermal_setup_fan_table()
967 rdev->pm.dpm.fan.ucode_fan_control = false; ci_thermal_setup_fan_table()
971 tmp64 = (u64)rdev->pm.dpm.fan.pwm_min * duty100; ci_thermal_setup_fan_table()
975 t_diff1 = rdev->pm.dpm.fan.t_med - rdev->pm.dpm.fan.t_min; ci_thermal_setup_fan_table()
976 t_diff2 = rdev->pm.dpm.fan.t_high - rdev->pm.dpm.fan.t_med; ci_thermal_setup_fan_table()
978 pwm_diff1 = rdev->pm.dpm.fan.pwm_med - rdev->pm.dpm.fan.pwm_min; ci_thermal_setup_fan_table()
979 pwm_diff2 = rdev->pm.dpm.fan.pwm_high - rdev->pm.dpm.fan.pwm_med; ci_thermal_setup_fan_table()
984 fan_table.TempMin = cpu_to_be16((50 + rdev->pm.dpm.fan.t_min) / 100); ci_thermal_setup_fan_table()
985 fan_table.TempMed = cpu_to_be16((50 + rdev->pm.dpm.fan.t_med) / 100); ci_thermal_setup_fan_table()
986 fan_table.TempMax = cpu_to_be16((50 + rdev->pm.dpm.fan.t_max) / 100); ci_thermal_setup_fan_table()
993 fan_table.HystDown = cpu_to_be16(rdev->pm.dpm.fan.t_hyst); ci_thermal_setup_fan_table()
1001 reference_clock = radeon_get_xclk(rdev); ci_thermal_setup_fan_table()
1003 fan_table.RefreshPeriod = cpu_to_be32((rdev->pm.dpm.fan.cycle_delay * ci_thermal_setup_fan_table()
1011 ret = ci_copy_bytes_to_smc(rdev, ci_thermal_setup_fan_table()
1019 rdev->pm.dpm.fan.ucode_fan_control = false; ci_thermal_setup_fan_table()
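
The ci_thermal_setup_fan_table() hits show temperatures held in 0.01 degC units, so the (50 + t) / 100 expressions round them to whole degrees, and the t_diff/pwm_diff pairs define two linear PWM-vs-temperature segments. A sketch of the slope encoding, with the 16 * duty100 scaling taken on trust from ci_dpm.c rather than from these hits:

    #include <stdint.h>

    /* One fan-table slope: PWM change per degree across a segment, in the
     * fixed-point form the SMC fan table is assumed to expect. Caller
     * guarantees t_diff > 0. */
    static uint16_t fan_slope(uint32_t duty100, uint32_t pwm_diff,
                              uint32_t t_diff)
    {
            return (uint16_t)((50u + (16u * duty100 * pwm_diff) / t_diff) / 100u);
    }
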
1025 static int ci_fan_ctrl_start_smc_fan_control(struct radeon_device *rdev) ci_fan_ctrl_start_smc_fan_control() argument
1027 struct ci_power_info *pi = ci_get_pi(rdev); ci_fan_ctrl_start_smc_fan_control()
1031 ret = ci_send_msg_to_smc_with_parameter(rdev, ci_fan_ctrl_start_smc_fan_control()
1036 ret = ci_send_msg_to_smc_with_parameter(rdev, ci_fan_ctrl_start_smc_fan_control()
1038 rdev->pm.dpm.fan.default_max_fan_pwm); ci_fan_ctrl_start_smc_fan_control()
1042 ret = ci_send_msg_to_smc_with_parameter(rdev, ci_fan_ctrl_start_smc_fan_control()
1053 static int ci_fan_ctrl_stop_smc_fan_control(struct radeon_device *rdev) ci_fan_ctrl_stop_smc_fan_control() argument
1056 struct ci_power_info *pi = ci_get_pi(rdev); ci_fan_ctrl_stop_smc_fan_control()
1058 ret = ci_send_msg_to_smc(rdev, PPSMC_StopFanControl); ci_fan_ctrl_stop_smc_fan_control()
1066 int ci_fan_ctrl_get_fan_speed_percent(struct radeon_device *rdev, ci_fan_ctrl_get_fan_speed_percent() argument
1072 if (rdev->pm.no_fan) ci_fan_ctrl_get_fan_speed_percent()
1091 int ci_fan_ctrl_set_fan_speed_percent(struct radeon_device *rdev, ci_fan_ctrl_set_fan_speed_percent() argument
1097 struct ci_power_info *pi = ci_get_pi(rdev); ci_fan_ctrl_set_fan_speed_percent()
1099 if (rdev->pm.no_fan) ci_fan_ctrl_set_fan_speed_percent()
1124 void ci_fan_ctrl_set_mode(struct radeon_device *rdev, u32 mode) ci_fan_ctrl_set_mode() argument
1128 if (rdev->pm.dpm.fan.ucode_fan_control) ci_fan_ctrl_set_mode()
1129 ci_fan_ctrl_stop_smc_fan_control(rdev); ci_fan_ctrl_set_mode()
1130 ci_fan_ctrl_set_static_mode(rdev, mode); ci_fan_ctrl_set_mode()
1133 if (rdev->pm.dpm.fan.ucode_fan_control) ci_fan_ctrl_set_mode()
1134 ci_thermal_start_smc_fan_control(rdev); ci_fan_ctrl_set_mode()
1136 ci_fan_ctrl_set_default_mode(rdev); ci_fan_ctrl_set_mode()
1140 u32 ci_fan_ctrl_get_mode(struct radeon_device *rdev) ci_fan_ctrl_get_mode() argument
1142 struct ci_power_info *pi = ci_get_pi(rdev); ci_fan_ctrl_get_mode()
1153 static int ci_fan_ctrl_get_fan_speed_rpm(struct radeon_device *rdev,
1157 u32 xclk = radeon_get_xclk(rdev);
1159 if (rdev->pm.no_fan)
1162 if (rdev->pm.fan_pulses_per_revolution == 0)
1174 static int ci_fan_ctrl_set_fan_speed_rpm(struct radeon_device *rdev,
1178 u32 xclk = radeon_get_xclk(rdev);
1180 if (rdev->pm.no_fan)
1183 if (rdev->pm.fan_pulses_per_revolution == 0)
1186 if ((speed < rdev->pm.fan_min_rpm) ||
1187 (speed > rdev->pm.fan_max_rpm))
1190 if (rdev->pm.dpm.fan.ucode_fan_control)
1191 ci_fan_ctrl_stop_smc_fan_control(rdev);
1198 ci_fan_ctrl_set_static_mode(rdev, FDO_PWM_MODE_STATIC_RPM);
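
The RPM paths above bail out when there is no fan or when fan_pulses_per_revolution is zero, because the tachometer period in xclk ticks and the fan speed are reciprocal. A sketch of the conversion under that model; the exact scaling constants in ci_dpm.c differ slightly (a fixed divisor is folded in), so treat this as the idea rather than the register encoding:

    #include <stdint.h>

    /* xclk comes from radeon_get_xclk() in 10 kHz units; 64-bit math keeps
     * 60 * xclk * 10000 from overflowing. Returns 0 on bad inputs. */
    static uint32_t rpm_to_tach_period(uint32_t xclk_10khz,
                                       uint32_t pulses_per_rev, uint32_t rpm)
    {
            uint64_t hz = (uint64_t)xclk_10khz * 10000u;

            if (pulses_per_rev == 0 || rpm == 0)
                    return 0;
            return (uint32_t)((60u * hz) / ((uint64_t)pulses_per_rev * rpm));
    }
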
1204 static void ci_fan_ctrl_set_default_mode(struct radeon_device *rdev) ci_fan_ctrl_set_default_mode() argument
1206 struct ci_power_info *pi = ci_get_pi(rdev); ci_fan_ctrl_set_default_mode()
1221 static void ci_thermal_start_smc_fan_control(struct radeon_device *rdev) ci_thermal_start_smc_fan_control() argument
1223 if (rdev->pm.dpm.fan.ucode_fan_control) { ci_thermal_start_smc_fan_control()
1224 ci_fan_ctrl_start_smc_fan_control(rdev); ci_thermal_start_smc_fan_control()
1225 ci_fan_ctrl_set_static_mode(rdev, FDO_PWM_MODE_STATIC); ci_thermal_start_smc_fan_control()
1229 static void ci_thermal_initialize(struct radeon_device *rdev) ci_thermal_initialize() argument
1233 if (rdev->pm.fan_pulses_per_revolution) { ci_thermal_initialize()
1235 tmp |= EDGE_PER_REV(rdev->pm.fan_pulses_per_revolution -1); ci_thermal_initialize()
1244 static int ci_thermal_start_thermal_controller(struct radeon_device *rdev) ci_thermal_start_thermal_controller() argument
1248 ci_thermal_initialize(rdev); ci_thermal_start_thermal_controller()
1249 ret = ci_thermal_set_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX); ci_thermal_start_thermal_controller()
1252 ret = ci_thermal_enable_alert(rdev, true); ci_thermal_start_thermal_controller()
1255 if (rdev->pm.dpm.fan.ucode_fan_control) { ci_thermal_start_thermal_controller()
1256 ret = ci_thermal_setup_fan_table(rdev); ci_thermal_start_thermal_controller()
1259 ci_thermal_start_smc_fan_control(rdev); ci_thermal_start_thermal_controller()
1265 static void ci_thermal_stop_thermal_controller(struct radeon_device *rdev) ci_thermal_stop_thermal_controller() argument
1267 if (!rdev->pm.no_fan) ci_thermal_stop_thermal_controller()
1268 ci_fan_ctrl_set_default_mode(rdev); ci_thermal_stop_thermal_controller()
1272 static int ci_read_smc_soft_register(struct radeon_device *rdev,
1275 struct ci_power_info *pi = ci_get_pi(rdev);
1277 return ci_read_smc_sram_dword(rdev,
1283 static int ci_write_smc_soft_register(struct radeon_device *rdev, ci_write_smc_soft_register() argument
1286 struct ci_power_info *pi = ci_get_pi(rdev); ci_write_smc_soft_register()
1288 return ci_write_smc_sram_dword(rdev, ci_write_smc_soft_register()
1293 static void ci_init_fps_limits(struct radeon_device *rdev) ci_init_fps_limits() argument
1295 struct ci_power_info *pi = ci_get_pi(rdev); ci_init_fps_limits()
1309 static int ci_update_sclk_t(struct radeon_device *rdev) ci_update_sclk_t() argument
1311 struct ci_power_info *pi = ci_get_pi(rdev); ci_update_sclk_t()
1318 ret = ci_copy_bytes_to_smc(rdev, ci_update_sclk_t()
1329 static void ci_get_leakage_voltages(struct radeon_device *rdev) ci_get_leakage_voltages() argument
1331 struct ci_power_info *pi = ci_get_pi(rdev); ci_get_leakage_voltages()
1339 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_EVV) { ci_get_leakage_voltages()
1342 if (radeon_atom_get_voltage_evv(rdev, virtual_voltage_id, &vddc) != 0) ci_get_leakage_voltages()
1350 } else if (radeon_atom_get_leakage_id_from_vbios(rdev, &leakage_id) == 0) { ci_get_leakage_voltages()
1353 if (radeon_atom_get_leakage_vddc_based_on_leakage_params(rdev, &vddc, &vddci, ci_get_leakage_voltages()
1371 static void ci_set_dpm_event_sources(struct radeon_device *rdev, u32 sources) ci_set_dpm_event_sources() argument
1373 struct ci_power_info *pi = ci_get_pi(rdev); ci_set_dpm_event_sources()
1420 static void ci_enable_auto_throttle_source(struct radeon_device *rdev, ci_enable_auto_throttle_source() argument
1424 struct ci_power_info *pi = ci_get_pi(rdev); ci_enable_auto_throttle_source()
1429 ci_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources); ci_enable_auto_throttle_source()
1434 ci_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources); ci_enable_auto_throttle_source()
1439 static void ci_enable_vr_hot_gpio_interrupt(struct radeon_device *rdev) ci_enable_vr_hot_gpio_interrupt() argument
1441 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT) ci_enable_vr_hot_gpio_interrupt()
1442 ci_send_msg_to_smc(rdev, PPSMC_MSG_EnableVRHotGPIOInterrupt); ci_enable_vr_hot_gpio_interrupt()
1445 static int ci_unfreeze_sclk_mclk_dpm(struct radeon_device *rdev) ci_unfreeze_sclk_mclk_dpm() argument
1447 struct ci_power_info *pi = ci_get_pi(rdev); ci_unfreeze_sclk_mclk_dpm()
1455 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_SCLKDPM_UnfreezeLevel); ci_unfreeze_sclk_mclk_dpm()
1462 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_MCLKDPM_UnfreezeLevel); ci_unfreeze_sclk_mclk_dpm()
1471 static int ci_enable_sclk_mclk_dpm(struct radeon_device *rdev, bool enable) ci_enable_sclk_mclk_dpm() argument
1473 struct ci_power_info *pi = ci_get_pi(rdev); ci_enable_sclk_mclk_dpm()
1478 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_DPM_Enable); ci_enable_sclk_mclk_dpm()
1484 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_MCLKDPM_Enable); ci_enable_sclk_mclk_dpm()
1502 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_DPM_Disable); ci_enable_sclk_mclk_dpm()
1508 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_MCLKDPM_Disable); ci_enable_sclk_mclk_dpm()
1517 static int ci_start_dpm(struct radeon_device *rdev) ci_start_dpm() argument
1519 struct ci_power_info *pi = ci_get_pi(rdev); ci_start_dpm()
1532 ci_write_smc_soft_register(rdev, offsetof(SMU7_SoftRegisters, VoltageChangeTimeout), 0x1000); ci_start_dpm()
1536 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_Voltage_Cntl_Enable); ci_start_dpm()
1540 ret = ci_enable_sclk_mclk_dpm(rdev, true); ci_start_dpm()
1545 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_PCIeDPM_Enable); ci_start_dpm()
1553 static int ci_freeze_sclk_mclk_dpm(struct radeon_device *rdev) ci_freeze_sclk_mclk_dpm() argument
1555 struct ci_power_info *pi = ci_get_pi(rdev); ci_freeze_sclk_mclk_dpm()
1563 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_SCLKDPM_FreezeLevel); ci_freeze_sclk_mclk_dpm()
1570 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_MCLKDPM_FreezeLevel); ci_freeze_sclk_mclk_dpm()
1578 static int ci_stop_dpm(struct radeon_device *rdev) ci_stop_dpm() argument
1580 struct ci_power_info *pi = ci_get_pi(rdev); ci_stop_dpm()
1594 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_PCIeDPM_Disable); ci_stop_dpm()
1599 ret = ci_enable_sclk_mclk_dpm(rdev, false); ci_stop_dpm()
1603 smc_result = ci_send_msg_to_smc(rdev, PPSMC_MSG_Voltage_Cntl_Disable); ci_stop_dpm()
1610 static void ci_enable_sclk_control(struct radeon_device *rdev, bool enable) ci_enable_sclk_control() argument
1622 static int ci_notify_hw_of_power_source(struct radeon_device *rdev,
1625 struct ci_power_info *pi = ci_get_pi(rdev);
1627 rdev->pm.dpm.dyn_state.cac_tdp_table;
1635 ci_set_power_limit(rdev, power_limit);
1639 ci_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC);
1641 ci_send_msg_to_smc(rdev, PPSMC_MSG_Remove_DC_Clamp);
1648 static PPSMC_Result ci_send_msg_to_smc_with_parameter(struct radeon_device *rdev, ci_send_msg_to_smc_with_parameter() argument
1652 return ci_send_msg_to_smc(rdev, msg); ci_send_msg_to_smc_with_parameter()
1655 static PPSMC_Result ci_send_msg_to_smc_return_parameter(struct radeon_device *rdev, ci_send_msg_to_smc_return_parameter() argument
1660 smc_result = ci_send_msg_to_smc(rdev, msg); ci_send_msg_to_smc_return_parameter()
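
The hit for ci_send_msg_to_smc_with_parameter() shows only its tail call; the argument staging that precedes it is off-screen. A sketch of the assumed mailbox pattern, where SMC_MSG_ARG_0 names the argument register and the return-parameter variant reads the same register back after the message completes (names assumed from ci_dpm.c):

    /* Sketch only: a parameterised SMC message stages its argument in a
     * mailbox register, then posts the message id. */
    static PPSMC_Result send_msg_with_parameter(struct radeon_device *rdev,
                                                PPSMC_Msg msg, u32 parameter)
    {
            WREG32(SMC_MSG_ARG_0, parameter);       /* stage the argument */
            return ci_send_msg_to_smc(rdev, msg);   /* then post the message */
    }
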
1668 static int ci_dpm_force_state_sclk(struct radeon_device *rdev, u32 n) ci_dpm_force_state_sclk() argument
1670 struct ci_power_info *pi = ci_get_pi(rdev); ci_dpm_force_state_sclk()
1674 ci_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SCLKDPM_SetEnabledMask, 1 << n); ci_dpm_force_state_sclk()
1682 static int ci_dpm_force_state_mclk(struct radeon_device *rdev, u32 n) ci_dpm_force_state_mclk() argument
1684 struct ci_power_info *pi = ci_get_pi(rdev); ci_dpm_force_state_mclk()
1688 ci_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_MCLKDPM_SetEnabledMask, 1 << n); ci_dpm_force_state_mclk()
1696 static int ci_dpm_force_state_pcie(struct radeon_device *rdev, u32 n) ci_dpm_force_state_pcie() argument
1698 struct ci_power_info *pi = ci_get_pi(rdev); ci_dpm_force_state_pcie()
1702 ci_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_PCIeDPM_ForceLevel, n); ci_dpm_force_state_pcie()
1710 static int ci_set_power_limit(struct radeon_device *rdev, u32 n) ci_set_power_limit() argument
1712 struct ci_power_info *pi = ci_get_pi(rdev); ci_set_power_limit()
1716 ci_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_PkgPwrSetLimit, n); ci_set_power_limit()
1724 static int ci_set_overdrive_target_tdp(struct radeon_device *rdev, ci_set_overdrive_target_tdp() argument
1728 ci_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_OverDriveSetTargetTdp, target_tdp); ci_set_overdrive_target_tdp()
1735 static int ci_set_boot_state(struct radeon_device *rdev)
1737 return ci_enable_sclk_mclk_dpm(rdev, false);
1741 static u32 ci_get_average_sclk_freq(struct radeon_device *rdev) ci_get_average_sclk_freq() argument
1745 ci_send_msg_to_smc_return_parameter(rdev, ci_get_average_sclk_freq()
1754 static u32 ci_get_average_mclk_freq(struct radeon_device *rdev) ci_get_average_mclk_freq() argument
1758 ci_send_msg_to_smc_return_parameter(rdev, ci_get_average_mclk_freq()
1767 static void ci_dpm_start_smc(struct radeon_device *rdev) ci_dpm_start_smc() argument
1771 ci_program_jump_on_start(rdev); ci_dpm_start_smc()
1772 ci_start_smc_clock(rdev); ci_dpm_start_smc()
1773 ci_start_smc(rdev); ci_dpm_start_smc()
1774 for (i = 0; i < rdev->usec_timeout; i++) { ci_dpm_start_smc()
1780 static void ci_dpm_stop_smc(struct radeon_device *rdev) ci_dpm_stop_smc() argument
1782 ci_reset_smc(rdev); ci_dpm_stop_smc()
1783 ci_stop_smc_clock(rdev); ci_dpm_stop_smc()
1786 static int ci_process_firmware_header(struct radeon_device *rdev) ci_process_firmware_header() argument
1788 struct ci_power_info *pi = ci_get_pi(rdev); ci_process_firmware_header()
1792 ret = ci_read_smc_sram_dword(rdev, ci_process_firmware_header()
1801 ret = ci_read_smc_sram_dword(rdev, ci_process_firmware_header()
1810 ret = ci_read_smc_sram_dword(rdev, ci_process_firmware_header()
1819 ret = ci_read_smc_sram_dword(rdev, ci_process_firmware_header()
1828 ret = ci_read_smc_sram_dword(rdev, ci_process_firmware_header()
1840 static void ci_read_clock_registers(struct radeon_device *rdev) ci_read_clock_registers() argument
1842 struct ci_power_info *pi = ci_get_pi(rdev); ci_read_clock_registers()
1867 static void ci_init_sclk_t(struct radeon_device *rdev) ci_init_sclk_t() argument
1869 struct ci_power_info *pi = ci_get_pi(rdev); ci_init_sclk_t()
1874 static void ci_enable_thermal_protection(struct radeon_device *rdev, ci_enable_thermal_protection() argument
1886 static void ci_enable_acpi_power_management(struct radeon_device *rdev) ci_enable_acpi_power_management() argument
1896 static int ci_enter_ulp_state(struct radeon_device *rdev)
1906 static int ci_exit_ulp_state(struct radeon_device *rdev)
1914 for (i = 0; i < rdev->usec_timeout; i++) {
1924 static int ci_notify_smc_display_change(struct radeon_device *rdev, ci_notify_smc_display_change() argument
1929 return (ci_send_msg_to_smc(rdev, msg) == PPSMC_Result_OK) ? 0 : -EINVAL; ci_notify_smc_display_change()
1932 static int ci_enable_ds_master_switch(struct radeon_device *rdev, ci_enable_ds_master_switch() argument
1935 struct ci_power_info *pi = ci_get_pi(rdev); ci_enable_ds_master_switch()
1939 if (ci_send_msg_to_smc(rdev, PPSMC_MSG_MASTER_DeepSleep_ON) != PPSMC_Result_OK) ci_enable_ds_master_switch()
1942 if (ci_send_msg_to_smc(rdev, PPSMC_MSG_MASTER_DeepSleep_OFF) != PPSMC_Result_OK) ci_enable_ds_master_switch()
1947 if (ci_send_msg_to_smc(rdev, PPSMC_MSG_MASTER_DeepSleep_OFF) != PPSMC_Result_OK) ci_enable_ds_master_switch()
1955 static void ci_program_display_gap(struct radeon_device *rdev) ci_program_display_gap() argument
1960 u32 ref_clock = rdev->clock.spll.reference_freq; ci_program_display_gap()
1961 u32 refresh_rate = r600_dpm_get_vrefresh(rdev); ci_program_display_gap()
1962 u32 vblank_time = r600_dpm_get_vblank_time(rdev); ci_program_display_gap()
1965 if (rdev->pm.dpm.new_active_crtc_count > 0) ci_program_display_gap()
1981 ci_write_smc_soft_register(rdev, offsetof(SMU7_SoftRegisters, PreVBlankGap), 0x64); ci_program_display_gap()
1982 ci_write_smc_soft_register(rdev, offsetof(SMU7_SoftRegisters, VBlankTimeout), (frame_time_in_us - pre_vbi_time_in_us)); ci_program_display_gap()
1985 ci_notify_smc_display_change(rdev, (rdev->pm.dpm.new_active_crtc_count == 1)); ci_program_display_gap()
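
ci_program_display_gap() derives the SMC's VBlankTimeout from the refresh rate and the measured vblank: frame time is 1e6/refresh, the pre-vblank window is frame minus a guard band minus vblank, and their difference is what the soft-register hit above writes. A sketch with an assumed 200 us guard band and a 60 Hz fallback:

    #include <stdint.h>

    static uint32_t vblank_timeout_us(uint32_t refresh_hz, uint32_t vblank_us)
    {
            uint32_t frame_us = 1000000u / (refresh_hz ? refresh_hz : 60u);

            if (vblank_us + 200u > frame_us)    /* avoid wrap on odd timings */
                    return frame_us;
            return vblank_us + 200u;            /* frame - pre_vbi_time */
    }
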
1989 static void ci_enable_spread_spectrum(struct radeon_device *rdev, bool enable) ci_enable_spread_spectrum() argument
1991 struct ci_power_info *pi = ci_get_pi(rdev); ci_enable_spread_spectrum()
2011 static void ci_program_sstp(struct radeon_device *rdev) ci_program_sstp() argument
2016 static void ci_enable_display_gap(struct radeon_device *rdev) ci_enable_display_gap() argument
2027 static void ci_program_vc(struct radeon_device *rdev) ci_program_vc() argument
2045 static void ci_clear_vc(struct radeon_device *rdev) ci_clear_vc() argument
2063 static int ci_upload_firmware(struct radeon_device *rdev) ci_upload_firmware() argument
2065 struct ci_power_info *pi = ci_get_pi(rdev); ci_upload_firmware()
2068 for (i = 0; i < rdev->usec_timeout; i++) { ci_upload_firmware()
2074 ci_stop_smc_clock(rdev); ci_upload_firmware()
2075 ci_reset_smc(rdev); ci_upload_firmware()
2077 ret = ci_load_smc_ucode(rdev, pi->sram_end); ci_upload_firmware()
2083 static int ci_get_svi2_voltage_table(struct radeon_device *rdev, ci_get_svi2_voltage_table() argument
2104 static int ci_construct_voltage_tables(struct radeon_device *rdev) ci_construct_voltage_tables() argument
2106 struct ci_power_info *pi = ci_get_pi(rdev); ci_construct_voltage_tables()
2110 ret = radeon_atom_get_voltage_table(rdev, VOLTAGE_TYPE_VDDC, ci_construct_voltage_tables()
2116 ret = ci_get_svi2_voltage_table(rdev, ci_construct_voltage_tables()
2117 &rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk, ci_construct_voltage_tables()
2124 si_trim_voltage_table_to_fit_state_table(rdev, SMU7_MAX_LEVELS_VDDC, ci_construct_voltage_tables()
2128 ret = radeon_atom_get_voltage_table(rdev, VOLTAGE_TYPE_VDDCI, ci_construct_voltage_tables()
2134 ret = ci_get_svi2_voltage_table(rdev, ci_construct_voltage_tables()
2135 &rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk, ci_construct_voltage_tables()
2142 si_trim_voltage_table_to_fit_state_table(rdev, SMU7_MAX_LEVELS_VDDCI, ci_construct_voltage_tables()
2146 ret = radeon_atom_get_voltage_table(rdev, VOLTAGE_TYPE_MVDDC, ci_construct_voltage_tables()
2152 ret = ci_get_svi2_voltage_table(rdev, ci_construct_voltage_tables()
2153 &rdev->pm.dpm.dyn_state.mvdd_dependency_on_mclk, ci_construct_voltage_tables()
2160 si_trim_voltage_table_to_fit_state_table(rdev, SMU7_MAX_LEVELS_MVDD, ci_construct_voltage_tables()
2166 static void ci_populate_smc_voltage_table(struct radeon_device *rdev, ci_populate_smc_voltage_table() argument
2172 ret = ci_get_std_voltage_value_sidd(rdev, voltage_table, ci_populate_smc_voltage_table()
2188 static int ci_populate_smc_vddc_table(struct radeon_device *rdev, ci_populate_smc_vddc_table() argument
2191 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_smc_vddc_table()
2196 ci_populate_smc_voltage_table(rdev, ci_populate_smc_vddc_table()
2211 static int ci_populate_smc_vddci_table(struct radeon_device *rdev, ci_populate_smc_vddci_table() argument
2215 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_smc_vddci_table()
2219 ci_populate_smc_voltage_table(rdev, ci_populate_smc_vddci_table()
2234 static int ci_populate_smc_mvdd_table(struct radeon_device *rdev, ci_populate_smc_mvdd_table() argument
2237 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_smc_mvdd_table()
2242 ci_populate_smc_voltage_table(rdev, ci_populate_smc_mvdd_table()
2257 static int ci_populate_smc_voltage_tables(struct radeon_device *rdev, ci_populate_smc_voltage_tables() argument
2262 ret = ci_populate_smc_vddc_table(rdev, table); ci_populate_smc_voltage_tables()
2266 ret = ci_populate_smc_vddci_table(rdev, table); ci_populate_smc_voltage_tables()
2270 ret = ci_populate_smc_mvdd_table(rdev, table); ci_populate_smc_voltage_tables()
2277 static int ci_populate_mvdd_value(struct radeon_device *rdev, u32 mclk, ci_populate_mvdd_value() argument
2280 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_mvdd_value()
2284 for (i = 0; i < rdev->pm.dpm.dyn_state.mvdd_dependency_on_mclk.count; i++) { ci_populate_mvdd_value()
2285 if (mclk <= rdev->pm.dpm.dyn_state.mvdd_dependency_on_mclk.entries[i].clk) { ci_populate_mvdd_value()
2291 if (i >= rdev->pm.dpm.dyn_state.mvdd_dependency_on_mclk.count) ci_populate_mvdd_value()
2298 static int ci_get_std_voltage_value_sidd(struct radeon_device *rdev, ci_get_std_voltage_value_sidd() argument
2307 if (rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries == NULL) ci_get_std_voltage_value_sidd()
2310 if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries) { ci_get_std_voltage_value_sidd()
2311 for (v_index = 0; (u32)v_index < rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.count; v_index++) { ci_get_std_voltage_value_sidd()
2313 rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[v_index].v) { ci_get_std_voltage_value_sidd()
2315 if ((u32)v_index < rdev->pm.dpm.dyn_state.cac_leakage_table.count) ci_get_std_voltage_value_sidd()
2318 idx = rdev->pm.dpm.dyn_state.cac_leakage_table.count - 1; ci_get_std_voltage_value_sidd()
2320 rdev->pm.dpm.dyn_state.cac_leakage_table.entries[idx].vddc * VOLTAGE_SCALE; ci_get_std_voltage_value_sidd()
2322 rdev->pm.dpm.dyn_state.cac_leakage_table.entries[idx].leakage * VOLTAGE_SCALE; ci_get_std_voltage_value_sidd()
2328 for (v_index = 0; (u32)v_index < rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.count; v_index++) { ci_get_std_voltage_value_sidd()
2330 rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[v_index].v) { ci_get_std_voltage_value_sidd()
2332 if ((u32)v_index < rdev->pm.dpm.dyn_state.cac_leakage_table.count) ci_get_std_voltage_value_sidd()
2335 idx = rdev->pm.dpm.dyn_state.cac_leakage_table.count - 1; ci_get_std_voltage_value_sidd()
2337 rdev->pm.dpm.dyn_state.cac_leakage_table.entries[idx].vddc * VOLTAGE_SCALE; ci_get_std_voltage_value_sidd()
2339 rdev->pm.dpm.dyn_state.cac_leakage_table.entries[idx].leakage * VOLTAGE_SCALE; ci_get_std_voltage_value_sidd()
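
ci_get_std_voltage_value_sidd() resolves a VDDC value to a lo/hi "standard voltage" pair: find its index in the vddc-on-sclk dependency table, reuse that index into the CAC leakage table (clamped to the last entry), and scale by VOLTAGE_SCALE. A simplified sketch covering only the exact-match pass, with VOLTAGE_SCALE assumed to be 4 and leak_count assumed nonzero (the real function checks the tables first):

    #include <stdint.h>

    static void std_voltage_sidd(const uint16_t *dep_v, uint32_t dep_count,
                                 const uint16_t *leak_vddc, const uint16_t *leak,
                                 uint32_t leak_count, uint16_t vddc,
                                 uint16_t *lo, uint16_t *hi)
    {
            uint32_t i, idx = leak_count - 1;   /* clamp past the end */

            for (i = 0; i < dep_count; i++) {
                    if (vddc == dep_v[i]) {
                            if (i < leak_count)
                                    idx = i;
                            break;
                    }
            }
            *lo = leak_vddc[idx] * 4;           /* VOLTAGE_SCALE assumed == 4 */
            *hi = leak[idx] * 4;
    }
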
2349 static void ci_populate_phase_value_based_on_sclk(struct radeon_device *rdev, ci_populate_phase_value_based_on_sclk() argument
2366 static void ci_populate_phase_value_based_on_mclk(struct radeon_device *rdev, ci_populate_phase_value_based_on_mclk() argument
2383 static int ci_init_arb_table_index(struct radeon_device *rdev) ci_init_arb_table_index() argument
2385 struct ci_power_info *pi = ci_get_pi(rdev); ci_init_arb_table_index()
2389 ret = ci_read_smc_sram_dword(rdev, pi->arb_table_start, ci_init_arb_table_index()
2397 return ci_write_smc_sram_dword(rdev, pi->arb_table_start, ci_init_arb_table_index()
2401 static int ci_get_dependency_volt_by_clk(struct radeon_device *rdev, ci_get_dependency_volt_by_clk() argument
2422 static u8 ci_get_sleep_divider_id_from_clock(struct radeon_device *rdev, ci_get_sleep_divider_id_from_clock() argument
2442 static int ci_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev) ci_initial_switch_from_arb_f0_to_f1() argument
2444 return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1); ci_initial_switch_from_arb_f0_to_f1()
2447 static int ci_reset_to_default(struct radeon_device *rdev) ci_reset_to_default() argument
2449 return (ci_send_msg_to_smc(rdev, PPSMC_MSG_ResetToDefaults) == PPSMC_Result_OK) ? ci_reset_to_default()
2453 static int ci_force_switch_to_arb_f0(struct radeon_device *rdev) ci_force_switch_to_arb_f0() argument
2462 return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0); ci_force_switch_to_arb_f0()
2465 static void ci_register_patching_mc_arb(struct radeon_device *rdev, ci_register_patching_mc_arb() argument
2477 ((rdev->pdev->device == 0x67B0) || ci_register_patching_mc_arb()
2478 (rdev->pdev->device == 0x67B1))) { ci_register_patching_mc_arb()
2492 static int ci_populate_memory_timing_parameters(struct radeon_device *rdev, ci_populate_memory_timing_parameters() argument
2501 radeon_atom_set_engine_dram_timings(rdev, sclk, mclk); ci_populate_memory_timing_parameters()
2507 ci_register_patching_mc_arb(rdev, sclk, mclk, &dram_timing2); ci_populate_memory_timing_parameters()
2516 static int ci_do_program_memory_timing_parameters(struct radeon_device *rdev) ci_do_program_memory_timing_parameters() argument
2518 struct ci_power_info *pi = ci_get_pi(rdev); ci_do_program_memory_timing_parameters()
2527 ret = ci_populate_memory_timing_parameters(rdev, ci_do_program_memory_timing_parameters()
2537 ret = ci_copy_bytes_to_smc(rdev, ci_do_program_memory_timing_parameters()
2546 static int ci_program_memory_timing_parameters(struct radeon_device *rdev) ci_program_memory_timing_parameters() argument
2548 struct ci_power_info *pi = ci_get_pi(rdev); ci_program_memory_timing_parameters()
2553 return ci_do_program_memory_timing_parameters(rdev); ci_program_memory_timing_parameters()
2556 static void ci_populate_smc_initial_state(struct radeon_device *rdev, ci_populate_smc_initial_state() argument
2560 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_smc_initial_state()
2563 for (level = 0; level < rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.count; level++) { ci_populate_smc_initial_state()
2564 if (rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[level].clk >= ci_populate_smc_initial_state()
2571 for (level = 0; level < rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk.count; level++) { ci_populate_smc_initial_state()
2572 if (rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk.entries[level].clk >= ci_populate_smc_initial_state()
2596 static void ci_populate_smc_link_level(struct radeon_device *rdev, ci_populate_smc_link_level() argument
2599 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_smc_link_level()
2618 static int ci_populate_smc_uvd_level(struct radeon_device *rdev, ci_populate_smc_uvd_level() argument
2626 rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.count; ci_populate_smc_uvd_level()
2630 rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[count].vclk; ci_populate_smc_uvd_level()
2632 rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[count].dclk; ci_populate_smc_uvd_level()
2634 rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[count].v * VOLTAGE_SCALE; ci_populate_smc_uvd_level()
2637 ret = radeon_atom_get_clock_dividers(rdev, ci_populate_smc_uvd_level()
2645 ret = radeon_atom_get_clock_dividers(rdev, ci_populate_smc_uvd_level()
2661 static int ci_populate_smc_vce_level(struct radeon_device *rdev, ci_populate_smc_vce_level() argument
2669 rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.count; ci_populate_smc_vce_level()
2673 rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[count].evclk; ci_populate_smc_vce_level()
2675 (u16)rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[count].v * VOLTAGE_SCALE; ci_populate_smc_vce_level()
2678 ret = radeon_atom_get_clock_dividers(rdev, ci_populate_smc_vce_level()
2694 static int ci_populate_smc_acp_level(struct radeon_device *rdev, ci_populate_smc_acp_level() argument
2702 (rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.count); ci_populate_smc_acp_level()
2706 rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries[count].clk; ci_populate_smc_acp_level()
2708 rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries[count].v; ci_populate_smc_acp_level()
2711 ret = radeon_atom_get_clock_dividers(rdev, ci_populate_smc_acp_level()
2726 static int ci_populate_smc_samu_level(struct radeon_device *rdev, ci_populate_smc_samu_level() argument
2734 rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.count; ci_populate_smc_samu_level()
2738 rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries[count].clk; ci_populate_smc_samu_level()
2740 rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries[count].v * VOLTAGE_SCALE; ci_populate_smc_samu_level()
2743 ret = radeon_atom_get_clock_dividers(rdev, ci_populate_smc_samu_level()
2758 static int ci_calculate_mclk_params(struct radeon_device *rdev, ci_calculate_mclk_params() argument
2764 struct ci_power_info *pi = ci_get_pi(rdev); ci_calculate_mclk_params()
2777 ret = radeon_atom_get_memory_pll_dividers(rdev, memory_clock, strobe_mode, &mpll_param); ci_calculate_mclk_params()
2801 u32 reference_clock = rdev->clock.mpll.reference_freq; ci_calculate_mclk_params()
2810 if (radeon_atombios_get_asic_ss_info(rdev, &ss, ci_calculate_mclk_params()
2845 static int ci_populate_single_memory_level(struct radeon_device *rdev, ci_populate_single_memory_level() argument
2849 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_single_memory_level()
2853 if (rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk.entries) { ci_populate_single_memory_level()
2854 ret = ci_get_dependency_volt_by_clk(rdev, ci_populate_single_memory_level()
2855 &rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk, ci_populate_single_memory_level()
2861 if (rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk.entries) { ci_populate_single_memory_level()
2862 ret = ci_get_dependency_volt_by_clk(rdev, ci_populate_single_memory_level()
2863 &rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk, ci_populate_single_memory_level()
2869 if (rdev->pm.dpm.dyn_state.mvdd_dependency_on_mclk.entries) { ci_populate_single_memory_level()
2870 ret = ci_get_dependency_volt_by_clk(rdev, ci_populate_single_memory_level()
2871 &rdev->pm.dpm.dyn_state.mvdd_dependency_on_mclk, ci_populate_single_memory_level()
2880 ci_populate_phase_value_based_on_mclk(rdev, ci_populate_single_memory_level()
2881 &rdev->pm.dpm.dyn_state.phase_shedding_limits_table, ci_populate_single_memory_level()
2903 (rdev->pm.dpm.new_active_crtc_count <= 2)) ci_populate_single_memory_level()
2935 ret = ci_calculate_mclk_params(rdev, memory_clock, memory_level, memory_level->StrobeEnable, dll_state_on); ci_populate_single_memory_level()
2959 static int ci_populate_smc_acpi_level(struct radeon_device *rdev, ci_populate_smc_acpi_level() argument
2962 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_smc_acpi_level()
2980 table->ACPILevel.SclkFrequency = rdev->clock.spll.reference_freq; ci_populate_smc_acpi_level()
2982 ret = radeon_atom_get_clock_dividers(rdev, ci_populate_smc_acpi_level()
3031 if (ci_populate_mvdd_value(rdev, 0, &voltage_level)) ci_populate_smc_acpi_level()
3075 static int ci_enable_ulv(struct radeon_device *rdev, bool enable) ci_enable_ulv() argument
3077 struct ci_power_info *pi = ci_get_pi(rdev); ci_enable_ulv()
3082 return (ci_send_msg_to_smc(rdev, PPSMC_MSG_EnableULV) == PPSMC_Result_OK) ? ci_enable_ulv()
3085 return (ci_send_msg_to_smc(rdev, PPSMC_MSG_DisableULV) == PPSMC_Result_OK) ? ci_enable_ulv()
3092 static int ci_populate_ulv_level(struct radeon_device *rdev, ci_populate_ulv_level() argument
3095 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_ulv_level()
3096 u16 ulv_voltage = rdev->pm.dpm.backbias_response_time; ci_populate_ulv_level()
3107 if (ulv_voltage > rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[0].v) ci_populate_ulv_level()
3111 rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[0].v - ulv_voltage; ci_populate_ulv_level()
3113 if (ulv_voltage > rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[0].v) ci_populate_ulv_level()
3117 ((rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[0].v - ulv_voltage) * ci_populate_ulv_level()
3129 static int ci_calculate_sclk_params(struct radeon_device *rdev, ci_calculate_sclk_params() argument
3133 struct ci_power_info *pi = ci_get_pi(rdev); ci_calculate_sclk_params()
3139 u32 reference_clock = rdev->clock.spll.reference_freq; ci_calculate_sclk_params()
3144 ret = radeon_atom_get_clock_dividers(rdev, ci_calculate_sclk_params()
3161 if (radeon_atombios_get_asic_ss_info(rdev, &ss, ci_calculate_sclk_params()
3185 static int ci_populate_single_graphic_level(struct radeon_device *rdev, ci_populate_single_graphic_level() argument
3190 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_single_graphic_level()
3193 ret = ci_calculate_sclk_params(rdev, engine_clock, graphic_level); ci_populate_single_graphic_level()
3197 ret = ci_get_dependency_volt_by_clk(rdev, ci_populate_single_graphic_level()
3198 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk, ci_populate_single_graphic_level()
3209 ci_populate_phase_value_based_on_sclk(rdev, ci_populate_single_graphic_level()
3210 &rdev->pm.dpm.dyn_state.phase_shedding_limits_table, ci_populate_single_graphic_level()
3225 graphic_level->DeepSleepDivId = ci_get_sleep_divider_id_from_clock(rdev, ci_populate_single_graphic_level()
3246 static int ci_populate_all_graphic_levels(struct radeon_device *rdev) ci_populate_all_graphic_levels() argument
3248 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_all_graphic_levels()
3260 ret = ci_populate_single_graphic_level(rdev, ci_populate_all_graphic_levels()
3278 ret = ci_copy_bytes_to_smc(rdev, level_array_address, ci_populate_all_graphic_levels()
3287 static int ci_populate_ulv_state(struct radeon_device *rdev, ci_populate_ulv_state() argument
3290 return ci_populate_ulv_level(rdev, ulv_level); ci_populate_ulv_state()
3293 static int ci_populate_all_memory_levels(struct radeon_device *rdev) ci_populate_all_memory_levels() argument
3295 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_all_memory_levels()
3309 ret = ci_populate_single_memory_level(rdev, ci_populate_all_memory_levels()
3319 ((rdev->pdev->device == 0x67B0) || (rdev->pdev->device == 0x67B1))) { ci_populate_all_memory_levels()
3335 ret = ci_copy_bytes_to_smc(rdev, level_array_address, ci_populate_all_memory_levels()
3344 static void ci_reset_single_dpm_table(struct radeon_device *rdev, ci_reset_single_dpm_table() argument
3363 static int ci_setup_default_pcie_tables(struct radeon_device *rdev) ci_setup_default_pcie_tables() argument
3365 struct ci_power_info *pi = ci_get_pi(rdev); ci_setup_default_pcie_tables()
3378 ci_reset_single_dpm_table(rdev, ci_setup_default_pcie_tables()
3382 if (rdev->family == CHIP_BONAIRE) ci_setup_default_pcie_tables()
3411 static int ci_setup_default_dpm_tables(struct radeon_device *rdev) ci_setup_default_dpm_tables() argument
3413 struct ci_power_info *pi = ci_get_pi(rdev); ci_setup_default_dpm_tables()
3415 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk; ci_setup_default_dpm_tables()
3417 &rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk; ci_setup_default_dpm_tables()
3419 &rdev->pm.dpm.dyn_state.cac_leakage_table; ci_setup_default_dpm_tables()
3433 ci_reset_single_dpm_table(rdev, ci_setup_default_dpm_tables()
3436 ci_reset_single_dpm_table(rdev, ci_setup_default_dpm_tables()
3439 ci_reset_single_dpm_table(rdev, ci_setup_default_dpm_tables()
3442 ci_reset_single_dpm_table(rdev, ci_setup_default_dpm_tables()
3445 ci_reset_single_dpm_table(rdev, ci_setup_default_dpm_tables()
3484 allowed_mclk_table = &rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk; ci_setup_default_dpm_tables()
3494 allowed_mclk_table = &rdev->pm.dpm.dyn_state.mvdd_dependency_on_mclk; ci_setup_default_dpm_tables()
3504 ci_setup_default_pcie_tables(rdev); ci_setup_default_dpm_tables()
3525 static int ci_init_smc_table(struct radeon_device *rdev) ci_init_smc_table() argument
3527 struct ci_power_info *pi = ci_get_pi(rdev); ci_init_smc_table()
3529 struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps; ci_init_smc_table()
3533 ret = ci_setup_default_dpm_tables(rdev); ci_init_smc_table()
3538 ci_populate_smc_voltage_tables(rdev, table); ci_init_smc_table()
3540 ci_init_fps_limits(rdev); ci_init_smc_table()
3542 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC) ci_init_smc_table()
3545 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC) ci_init_smc_table()
3552 ret = ci_populate_ulv_state(rdev, &pi->smc_state_table.Ulv); ci_init_smc_table()
3558 ret = ci_populate_all_graphic_levels(rdev); ci_init_smc_table()
3562 ret = ci_populate_all_memory_levels(rdev); ci_init_smc_table()
3566 ci_populate_smc_link_level(rdev, table); ci_init_smc_table()
3568 ret = ci_populate_smc_acpi_level(rdev, table); ci_init_smc_table()
3572 ret = ci_populate_smc_vce_level(rdev, table); ci_init_smc_table()
3576 ret = ci_populate_smc_acp_level(rdev, table); ci_init_smc_table()
3580 ret = ci_populate_smc_samu_level(rdev, table); ci_init_smc_table()
3584 ret = ci_do_program_memory_timing_parameters(rdev); ci_init_smc_table()
3588 ret = ci_populate_smc_uvd_level(rdev, table); ci_init_smc_table()
3611 ci_populate_smc_initial_state(rdev, radeon_boot_state); ci_init_smc_table()
3613 ret = ci_populate_bapm_parameters_in_dpm_table(rdev); ci_init_smc_table()
3661 ret = ci_copy_bytes_to_smc(rdev, ci_init_smc_table()
3673 static void ci_trim_single_dpm_states(struct radeon_device *rdev, ci_trim_single_dpm_states() argument
3688 static void ci_trim_pcie_dpm_states(struct radeon_device *rdev, ci_trim_pcie_dpm_states() argument
3692 struct ci_power_info *pi = ci_get_pi(rdev); ci_trim_pcie_dpm_states()
3719 static int ci_trim_dpm_states(struct radeon_device *rdev, ci_trim_dpm_states() argument
3723 struct ci_power_info *pi = ci_get_pi(rdev); ci_trim_dpm_states()
3734 ci_trim_single_dpm_states(rdev, ci_trim_dpm_states()
3739 ci_trim_single_dpm_states(rdev, ci_trim_dpm_states()
3744 ci_trim_pcie_dpm_states(rdev, ci_trim_dpm_states()
3753 static int ci_apply_disp_minimum_voltage_request(struct radeon_device *rdev) ci_apply_disp_minimum_voltage_request() argument
3756 &rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk; ci_apply_disp_minimum_voltage_request()
3758 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk; ci_apply_disp_minimum_voltage_request()
3768 if (rdev->clock.current_dispclk == disp_voltage_table->entries[i].clk) ci_apply_disp_minimum_voltage_request()
3775 return (ci_send_msg_to_smc_with_parameter(rdev, ci_apply_disp_minimum_voltage_request()
3785 static int ci_upload_dpm_level_enable_mask(struct radeon_device *rdev) ci_upload_dpm_level_enable_mask() argument
3787 struct ci_power_info *pi = ci_get_pi(rdev); ci_upload_dpm_level_enable_mask()
3790 ci_apply_disp_minimum_voltage_request(rdev); ci_upload_dpm_level_enable_mask()
3794 result = ci_send_msg_to_smc_with_parameter(rdev, ci_upload_dpm_level_enable_mask()
3804 result = ci_send_msg_to_smc_with_parameter(rdev, ci_upload_dpm_level_enable_mask()
3814 result = ci_send_msg_to_smc_with_parameter(rdev, ci_upload_dpm_level_enable_mask()
3825 static void ci_find_dpm_states_clocks_in_dpm_table(struct radeon_device *rdev, ci_find_dpm_states_clocks_in_dpm_table() argument
3828 struct ci_power_info *pi = ci_get_pi(rdev); ci_find_dpm_states_clocks_in_dpm_table()
3859 if (rdev->pm.dpm.current_active_crtc_count != ci_find_dpm_states_clocks_in_dpm_table()
3860 rdev->pm.dpm.new_active_crtc_count) ci_find_dpm_states_clocks_in_dpm_table()
3864 static int ci_populate_and_upload_sclk_mclk_dpm_levels(struct radeon_device *rdev, ci_populate_and_upload_sclk_mclk_dpm_levels() argument
3867 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_and_upload_sclk_mclk_dpm_levels()
3884 ret = ci_populate_all_graphic_levels(rdev); ci_populate_and_upload_sclk_mclk_dpm_levels()
3890 ret = ci_populate_all_memory_levels(rdev); ci_populate_and_upload_sclk_mclk_dpm_levels()
3898 static int ci_enable_uvd_dpm(struct radeon_device *rdev, bool enable) ci_enable_uvd_dpm() argument
3900 struct ci_power_info *pi = ci_get_pi(rdev); ci_enable_uvd_dpm()
3904 if (rdev->pm.dpm.ac_power) ci_enable_uvd_dpm()
3905 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac; ci_enable_uvd_dpm()
3907 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc; ci_enable_uvd_dpm()
3912 for (i = rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.count - 1; i >= 0; i--) { ci_enable_uvd_dpm()
3913 if (rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].v <= max_limits->vddc) { ci_enable_uvd_dpm()
3921 ci_send_msg_to_smc_with_parameter(rdev, ci_enable_uvd_dpm()
3928 ci_send_msg_to_smc_with_parameter(rdev, ci_enable_uvd_dpm()
3936 ci_send_msg_to_smc_with_parameter(rdev, ci_enable_uvd_dpm()
3942 return (ci_send_msg_to_smc(rdev, enable ? ci_enable_uvd_dpm()
3947 static int ci_enable_vce_dpm(struct radeon_device *rdev, bool enable) ci_enable_vce_dpm() argument
3949 struct ci_power_info *pi = ci_get_pi(rdev); ci_enable_vce_dpm()
3953 if (rdev->pm.dpm.ac_power) ci_enable_vce_dpm()
3954 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac; ci_enable_vce_dpm()
3956 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc; ci_enable_vce_dpm()
3960 for (i = rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.count - 1; i >= 0; i--) { ci_enable_vce_dpm()
3961 if (rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].v <= max_limits->vddc) { ci_enable_vce_dpm()
3969 ci_send_msg_to_smc_with_parameter(rdev, ci_enable_vce_dpm()
3974 return (ci_send_msg_to_smc(rdev, enable ? ci_enable_vce_dpm()
3980 static int ci_enable_samu_dpm(struct radeon_device *rdev, bool enable)
3982 struct ci_power_info *pi = ci_get_pi(rdev);
3986 if (rdev->pm.dpm.ac_power)
3987 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
3989 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;
3993 for (i = rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.count - 1; i >= 0; i--) {
3994 if (rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries[i].v <= max_limits->vddc) {
4002 ci_send_msg_to_smc_with_parameter(rdev,
4006 return (ci_send_msg_to_smc(rdev, enable ?
4011 static int ci_enable_acp_dpm(struct radeon_device *rdev, bool enable)
4013 struct ci_power_info *pi = ci_get_pi(rdev);
4017 if (rdev->pm.dpm.ac_power)
4018 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
4020 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;
4024 for (i = rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.count - 1; i >= 0; i--) {
4025 if (rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries[i].v <= max_limits->vddc) {
4033 ci_send_msg_to_smc_with_parameter(rdev,
4038 return (ci_send_msg_to_smc(rdev, enable ?
4044 static int ci_update_uvd_dpm(struct radeon_device *rdev, bool gate) ci_update_uvd_dpm() argument
4046 struct ci_power_info *pi = ci_get_pi(rdev); ci_update_uvd_dpm()
4051 (rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.count <= 0)) ci_update_uvd_dpm()
4055 rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.count - 1; ci_update_uvd_dpm()
4063 return ci_enable_uvd_dpm(rdev, !gate); ci_update_uvd_dpm()
4066 static u8 ci_get_vce_boot_level(struct radeon_device *rdev) ci_get_vce_boot_level() argument
4071 &rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table; ci_get_vce_boot_level()
4081 static int ci_update_vce_dpm(struct radeon_device *rdev, ci_update_vce_dpm() argument
4085 struct ci_power_info *pi = ci_get_pi(rdev); ci_update_vce_dpm()
4092 cik_update_cg(rdev, RADEON_CG_BLOCK_VCE, false); ci_update_vce_dpm()
4094 pi->smc_state_table.VceBootLevel = ci_get_vce_boot_level(rdev); ci_update_vce_dpm()
4100 ret = ci_enable_vce_dpm(rdev, true); ci_update_vce_dpm()
4103 cik_update_cg(rdev, RADEON_CG_BLOCK_VCE, true); ci_update_vce_dpm()
4105 ret = ci_enable_vce_dpm(rdev, false); ci_update_vce_dpm()
4112 static int ci_update_samu_dpm(struct radeon_device *rdev, bool gate)
4114 return ci_enable_samu_dpm(rdev, gate);
4117 static int ci_update_acp_dpm(struct radeon_device *rdev, bool gate)
4119 struct ci_power_info *pi = ci_get_pi(rdev);
4131 return ci_enable_acp_dpm(rdev, !gate);
4135 static int ci_generate_dpm_level_enable_mask(struct radeon_device *rdev, ci_generate_dpm_level_enable_mask() argument
4138 struct ci_power_info *pi = ci_get_pi(rdev); ci_generate_dpm_level_enable_mask()
4141 ret = ci_trim_dpm_states(rdev, radeon_state); ci_generate_dpm_level_enable_mask()
4161 static u32 ci_get_lowest_enabled_level(struct radeon_device *rdev, ci_get_lowest_enabled_level() argument
4173 int ci_dpm_force_performance_level(struct radeon_device *rdev, ci_dpm_force_performance_level() argument
4176 struct ci_power_info *pi = ci_get_pi(rdev); ci_dpm_force_performance_level()
4188 ret = ci_dpm_force_state_pcie(rdev, level); ci_dpm_force_performance_level()
4191 for (i = 0; i < rdev->usec_timeout; i++) { ci_dpm_force_performance_level()
4207 ret = ci_dpm_force_state_sclk(rdev, levels); ci_dpm_force_performance_level()
4210 for (i = 0; i < rdev->usec_timeout; i++) { ci_dpm_force_performance_level()
4226 ret = ci_dpm_force_state_mclk(rdev, levels); ci_dpm_force_performance_level()
4229 for (i = 0; i < rdev->usec_timeout; i++) { ci_dpm_force_performance_level()
4241 levels = ci_get_lowest_enabled_level(rdev, ci_dpm_force_performance_level()
4243 ret = ci_dpm_force_state_sclk(rdev, levels); ci_dpm_force_performance_level()
4246 for (i = 0; i < rdev->usec_timeout; i++) { ci_dpm_force_performance_level()
4256 levels = ci_get_lowest_enabled_level(rdev, ci_dpm_force_performance_level()
4258 ret = ci_dpm_force_state_mclk(rdev, levels); ci_dpm_force_performance_level()
4261 for (i = 0; i < rdev->usec_timeout; i++) { ci_dpm_force_performance_level()
4271 levels = ci_get_lowest_enabled_level(rdev, ci_dpm_force_performance_level()
4273 ret = ci_dpm_force_state_pcie(rdev, levels); ci_dpm_force_performance_level()
4276 for (i = 0; i < rdev->usec_timeout; i++) { ci_dpm_force_performance_level()
4288 smc_result = ci_send_msg_to_smc(rdev, ci_dpm_force_performance_level()
4293 ret = ci_upload_dpm_level_enable_mask(rdev); ci_dpm_force_performance_level()
4298 rdev->pm.dpm.forced_level = level; ci_dpm_force_performance_level()
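Several of the ci_dpm_force_performance_level() hits above share the driver's bounded-poll idiom: send a request to the SMC, then spin for at most rdev->usec_timeout iterations until the hardware reflects the new state. A minimal standalone sketch of that idiom follows; the timeout constant and the readiness predicate are stand-ins, not the real radeon code.

/* Illustrative sketch only: the bounded-poll idiom visible in
 * ci_dpm_force_performance_level(). USEC_TIMEOUT stands in for
 * rdev->usec_timeout and the readiness predicate is faked. */
#include <stdbool.h>
#include <stdio.h>

#define USEC_TIMEOUT 100000

static int polls_until_ready = 5;	/* fake hardware: ready after 5 polls */

static bool state_applied(void)
{
	return --polls_until_ready <= 0;
}

static void udelay(unsigned int usecs)
{
	(void)usecs;	/* the kernel busy-waits here; a no-op suffices */
}

/* Return 0 once the hardware reflects the request, -1 on timeout;
 * the callers above typically turn the timeout into an error code. */
static int wait_for_state(void)
{
	unsigned int i;

	for (i = 0; i < USEC_TIMEOUT; i++) {
		if (state_applied())
			return 0;
		udelay(1);
	}
	return -1;
}

int main(void)
{
	printf("wait_for_state() = %d\n", wait_for_state());
	return 0;
}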
4303 static int ci_set_mc_special_registers(struct radeon_device *rdev, ci_set_mc_special_registers() argument
4306 struct ci_power_info *pi = ci_get_pi(rdev); ci_set_mc_special_registers()
4501 static int ci_register_patching_mc_seq(struct radeon_device *rdev, ci_register_patching_mc_seq() argument
4512 ((rdev->pdev->device == 0x67B0) || ci_register_patching_mc_seq()
4513 (rdev->pdev->device == 0x67B1))) { ci_register_patching_mc_seq()
4591 static int ci_initialize_mc_reg_table(struct radeon_device *rdev) ci_initialize_mc_reg_table() argument
4593 struct ci_power_info *pi = ci_get_pi(rdev); ci_initialize_mc_reg_table()
4596 u8 module_index = rv770_get_memory_module_index(rdev); ci_initialize_mc_reg_table()
4624 ret = radeon_atom_init_mc_reg_table(rdev, module_index, table); ci_initialize_mc_reg_table()
4634 ret = ci_register_patching_mc_seq(rdev, ci_table); ci_initialize_mc_reg_table()
4638 ret = ci_set_mc_special_registers(rdev, ci_table); ci_initialize_mc_reg_table()
4650 static int ci_populate_mc_reg_addresses(struct radeon_device *rdev, ci_populate_mc_reg_addresses() argument
4653 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_mc_reg_addresses()
4685 static void ci_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev, ci_convert_mc_reg_table_entry_to_smc() argument
4689 struct ci_power_info *pi = ci_get_pi(rdev); ci_convert_mc_reg_table_entry_to_smc()
4705 static void ci_convert_mc_reg_table_to_smc(struct radeon_device *rdev, ci_convert_mc_reg_table_to_smc() argument
4708 struct ci_power_info *pi = ci_get_pi(rdev); ci_convert_mc_reg_table_to_smc()
4712 ci_convert_mc_reg_table_entry_to_smc(rdev, ci_convert_mc_reg_table_to_smc()
4717 static int ci_populate_initial_mc_reg_table(struct radeon_device *rdev) ci_populate_initial_mc_reg_table() argument
4719 struct ci_power_info *pi = ci_get_pi(rdev); ci_populate_initial_mc_reg_table()
4724 ret = ci_populate_mc_reg_addresses(rdev, &pi->smc_mc_reg_table); ci_populate_initial_mc_reg_table()
4727 ci_convert_mc_reg_table_to_smc(rdev, &pi->smc_mc_reg_table); ci_populate_initial_mc_reg_table()
4729 return ci_copy_bytes_to_smc(rdev, ci_populate_initial_mc_reg_table()
4736 static int ci_update_and_upload_mc_reg_table(struct radeon_device *rdev) ci_update_and_upload_mc_reg_table() argument
4738 struct ci_power_info *pi = ci_get_pi(rdev); ci_update_and_upload_mc_reg_table()
4745 ci_convert_mc_reg_table_to_smc(rdev, &pi->smc_mc_reg_table); ci_update_and_upload_mc_reg_table()
4747 return ci_copy_bytes_to_smc(rdev, ci_update_and_upload_mc_reg_table()
4756 static void ci_enable_voltage_control(struct radeon_device *rdev) ci_enable_voltage_control() argument
4764 static enum radeon_pcie_gen ci_get_maximum_link_speed(struct radeon_device *rdev, ci_get_maximum_link_speed() argument
4780 static u16 ci_get_current_pcie_speed(struct radeon_device *rdev) ci_get_current_pcie_speed() argument
4790 static int ci_get_current_pcie_lane_number(struct radeon_device *rdev) ci_get_current_pcie_lane_number() argument
4816 static void ci_request_link_speed_change_before_state_change(struct radeon_device *rdev, ci_request_link_speed_change_before_state_change() argument
4820 struct ci_power_info *pi = ci_get_pi(rdev); ci_request_link_speed_change_before_state_change()
4822 ci_get_maximum_link_speed(rdev, radeon_new_state); ci_request_link_speed_change_before_state_change()
4826 current_link_speed = ci_get_maximum_link_speed(rdev, radeon_current_state); ci_request_link_speed_change_before_state_change()
4836 if (radeon_acpi_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN3, false) == 0) ci_request_link_speed_change_before_state_change()
4842 if (radeon_acpi_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, false) == 0) ci_request_link_speed_change_before_state_change()
4846 pi->force_pcie_gen = ci_get_current_pcie_speed(rdev); ci_request_link_speed_change_before_state_change()
4855 static void ci_notify_link_speed_change_after_state_change(struct radeon_device *rdev, ci_notify_link_speed_change_after_state_change() argument
4859 struct ci_power_info *pi = ci_get_pi(rdev); ci_notify_link_speed_change_after_state_change()
4861 ci_get_maximum_link_speed(rdev, radeon_new_state); ci_notify_link_speed_change_after_state_change()
4873 (ci_get_current_pcie_speed(rdev) > 0)) ci_notify_link_speed_change_after_state_change()
4877 radeon_acpi_pcie_performance_request(rdev, request, false); ci_notify_link_speed_change_after_state_change()
4882 static int ci_set_private_data_variables_based_on_pptable(struct radeon_device *rdev) ci_set_private_data_variables_based_on_pptable() argument
4884 struct ci_power_info *pi = ci_get_pi(rdev); ci_set_private_data_variables_based_on_pptable()
4886 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk; ci_set_private_data_variables_based_on_pptable()
4888 &rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk; ci_set_private_data_variables_based_on_pptable()
4890 &rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk; ci_set_private_data_variables_based_on_pptable()
4913 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = ci_set_private_data_variables_based_on_pptable()
4915 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = ci_set_private_data_variables_based_on_pptable()
4917 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = ci_set_private_data_variables_based_on_pptable()
4919 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = ci_set_private_data_variables_based_on_pptable()
4925 static void ci_patch_with_vddc_leakage(struct radeon_device *rdev, u16 *vddc) ci_patch_with_vddc_leakage() argument
4927 struct ci_power_info *pi = ci_get_pi(rdev); ci_patch_with_vddc_leakage()
4939 static void ci_patch_with_vddci_leakage(struct radeon_device *rdev, u16 *vddci) ci_patch_with_vddci_leakage() argument
4941 struct ci_power_info *pi = ci_get_pi(rdev); ci_patch_with_vddci_leakage()
4953 static void ci_patch_clock_voltage_dependency_table_with_vddc_leakage(struct radeon_device *rdev, ci_patch_clock_voltage_dependency_table_with_vddc_leakage() argument
4960 ci_patch_with_vddc_leakage(rdev, &table->entries[i].v); ci_patch_clock_voltage_dependency_table_with_vddc_leakage()
4964 static void ci_patch_clock_voltage_dependency_table_with_vddci_leakage(struct radeon_device *rdev, ci_patch_clock_voltage_dependency_table_with_vddci_leakage() argument
4971 ci_patch_with_vddci_leakage(rdev, &table->entries[i].v); ci_patch_clock_voltage_dependency_table_with_vddci_leakage()
4975 static void ci_patch_vce_clock_voltage_dependency_table_with_vddc_leakage(struct radeon_device *rdev, ci_patch_vce_clock_voltage_dependency_table_with_vddc_leakage() argument
4982 ci_patch_with_vddc_leakage(rdev, &table->entries[i].v); ci_patch_vce_clock_voltage_dependency_table_with_vddc_leakage()
4986 static void ci_patch_uvd_clock_voltage_dependency_table_with_vddc_leakage(struct radeon_device *rdev, ci_patch_uvd_clock_voltage_dependency_table_with_vddc_leakage() argument
4993 ci_patch_with_vddc_leakage(rdev, &table->entries[i].v); ci_patch_uvd_clock_voltage_dependency_table_with_vddc_leakage()
4997 static void ci_patch_vddc_phase_shed_limit_table_with_vddc_leakage(struct radeon_device *rdev, ci_patch_vddc_phase_shed_limit_table_with_vddc_leakage() argument
5004 ci_patch_with_vddc_leakage(rdev, &table->entries[i].voltage); ci_patch_vddc_phase_shed_limit_table_with_vddc_leakage()
5008 static void ci_patch_clock_voltage_limits_with_vddc_leakage(struct radeon_device *rdev, ci_patch_clock_voltage_limits_with_vddc_leakage() argument
5012 ci_patch_with_vddc_leakage(rdev, (u16 *)&table->vddc); ci_patch_clock_voltage_limits_with_vddc_leakage()
5013 ci_patch_with_vddci_leakage(rdev, (u16 *)&table->vddci); ci_patch_clock_voltage_limits_with_vddc_leakage()
5017 static void ci_patch_cac_leakage_table_with_vddc_leakage(struct radeon_device *rdev, ci_patch_cac_leakage_table_with_vddc_leakage() argument
5024 ci_patch_with_vddc_leakage(rdev, &table->entries[i].vddc); ci_patch_cac_leakage_table_with_vddc_leakage()
5028 static void ci_patch_dependency_tables_with_leakage(struct radeon_device *rdev) ci_patch_dependency_tables_with_leakage() argument
5031 ci_patch_clock_voltage_dependency_table_with_vddc_leakage(rdev, ci_patch_dependency_tables_with_leakage()
5032 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk); ci_patch_dependency_tables_with_leakage()
5033 ci_patch_clock_voltage_dependency_table_with_vddc_leakage(rdev, ci_patch_dependency_tables_with_leakage()
5034 &rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk); ci_patch_dependency_tables_with_leakage()
5035 ci_patch_clock_voltage_dependency_table_with_vddc_leakage(rdev, ci_patch_dependency_tables_with_leakage()
5036 &rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk); ci_patch_dependency_tables_with_leakage()
5037 ci_patch_clock_voltage_dependency_table_with_vddci_leakage(rdev, ci_patch_dependency_tables_with_leakage()
5038 &rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk); ci_patch_dependency_tables_with_leakage()
5039 ci_patch_vce_clock_voltage_dependency_table_with_vddc_leakage(rdev, ci_patch_dependency_tables_with_leakage()
5040 &rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table); ci_patch_dependency_tables_with_leakage()
5041 ci_patch_uvd_clock_voltage_dependency_table_with_vddc_leakage(rdev, ci_patch_dependency_tables_with_leakage()
5042 &rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table); ci_patch_dependency_tables_with_leakage()
5043 ci_patch_clock_voltage_dependency_table_with_vddc_leakage(rdev, ci_patch_dependency_tables_with_leakage()
5044 &rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table); ci_patch_dependency_tables_with_leakage()
5045 ci_patch_clock_voltage_dependency_table_with_vddc_leakage(rdev, ci_patch_dependency_tables_with_leakage()
5046 &rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table); ci_patch_dependency_tables_with_leakage()
5047 ci_patch_vddc_phase_shed_limit_table_with_vddc_leakage(rdev, ci_patch_dependency_tables_with_leakage()
5048 &rdev->pm.dpm.dyn_state.phase_shedding_limits_table); ci_patch_dependency_tables_with_leakage()
5049 ci_patch_clock_voltage_limits_with_vddc_leakage(rdev, ci_patch_dependency_tables_with_leakage()
5050 &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac); ci_patch_dependency_tables_with_leakage()
5051 ci_patch_clock_voltage_limits_with_vddc_leakage(rdev, ci_patch_dependency_tables_with_leakage()
5052 &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc); ci_patch_dependency_tables_with_leakage()
5053 ci_patch_cac_leakage_table_with_vddc_leakage(rdev, ci_patch_dependency_tables_with_leakage()
5054 &rdev->pm.dpm.dyn_state.cac_leakage_table); ci_patch_dependency_tables_with_leakage()
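The ci_patch_*_with_vddc_leakage() hits above all funnel into the same fix-up: walk every voltage field in the power-play tables and, where an entry holds a leakage placeholder rather than a real voltage, substitute the measured value kept in ci_power_info. A simplified sketch under that reading; the table layout and the 0xff01 placeholder encoding are assumptions for illustration.

/* Simplified stand-in for the vddc_leakage bookkeeping in ci_power_info. */
#include <stdint.h>
#include <stdio.h>

#define MAX_LEAKAGE 8

struct leakage_table {
	int count;
	uint16_t leakage_id[MAX_LEAKAGE];	/* placeholder voltage codes */
	uint16_t actual_voltage[MAX_LEAKAGE];	/* measured real voltages */
};

/* ~ ci_patch_with_vddc_leakage(): rewrite *vddc in place if it matches a
 * known leakage placeholder, otherwise leave it untouched. */
static void patch_with_leakage(const struct leakage_table *t, uint16_t *vddc)
{
	int i;

	for (i = 0; i < t->count; i++) {
		if (*vddc == t->leakage_id[i]) {
			*vddc = t->actual_voltage[i];
			return;
		}
	}
}

int main(void)
{
	struct leakage_table t = {
		.count = 1,
		.leakage_id = { 0xff01 },	/* assumed encoding */
		.actual_voltage = { 987 },
	};
	uint16_t v = 0xff01;

	patch_with_leakage(&t, &v);
	printf("patched vddc = %u mV\n", v);
	return 0;
}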
5058 static void ci_get_memory_type(struct radeon_device *rdev) ci_get_memory_type() argument
5060 struct ci_power_info *pi = ci_get_pi(rdev); ci_get_memory_type()
5073 static void ci_update_current_ps(struct radeon_device *rdev, ci_update_current_ps() argument
5077 struct ci_power_info *pi = ci_get_pi(rdev); ci_update_current_ps()
5084 static void ci_update_requested_ps(struct radeon_device *rdev, ci_update_requested_ps() argument
5088 struct ci_power_info *pi = ci_get_pi(rdev); ci_update_requested_ps()
5095 int ci_dpm_pre_set_power_state(struct radeon_device *rdev) ci_dpm_pre_set_power_state() argument
5097 struct ci_power_info *pi = ci_get_pi(rdev); ci_dpm_pre_set_power_state()
5098 struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps; ci_dpm_pre_set_power_state()
5101 ci_update_requested_ps(rdev, new_ps); ci_dpm_pre_set_power_state()
5103 ci_apply_state_adjust_rules(rdev, &pi->requested_rps); ci_dpm_pre_set_power_state()
5108 void ci_dpm_post_set_power_state(struct radeon_device *rdev) ci_dpm_post_set_power_state() argument
5110 struct ci_power_info *pi = ci_get_pi(rdev); ci_dpm_post_set_power_state()
5113 ci_update_current_ps(rdev, new_ps); ci_dpm_post_set_power_state()
5117 void ci_dpm_setup_asic(struct radeon_device *rdev) ci_dpm_setup_asic() argument
5121 r = ci_mc_load_microcode(rdev); ci_dpm_setup_asic()
5124 ci_read_clock_registers(rdev); ci_dpm_setup_asic()
5125 ci_get_memory_type(rdev); ci_dpm_setup_asic()
5126 ci_enable_acpi_power_management(rdev); ci_dpm_setup_asic()
5127 ci_init_sclk_t(rdev); ci_dpm_setup_asic()
5130 int ci_dpm_enable(struct radeon_device *rdev) ci_dpm_enable() argument
5132 struct ci_power_info *pi = ci_get_pi(rdev); ci_dpm_enable()
5133 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps; ci_dpm_enable()
5136 if (ci_is_smc_running(rdev)) ci_dpm_enable()
5139 ci_enable_voltage_control(rdev); ci_dpm_enable()
5140 ret = ci_construct_voltage_tables(rdev); ci_dpm_enable()
5147 ret = ci_initialize_mc_reg_table(rdev); ci_dpm_enable()
5152 ci_enable_spread_spectrum(rdev, true); ci_dpm_enable()
5154 ci_enable_thermal_protection(rdev, true); ci_dpm_enable()
5155 ci_program_sstp(rdev); ci_dpm_enable()
5156 ci_enable_display_gap(rdev); ci_dpm_enable()
5157 ci_program_vc(rdev); ci_dpm_enable()
5158 ret = ci_upload_firmware(rdev); ci_dpm_enable()
5163 ret = ci_process_firmware_header(rdev); ci_dpm_enable()
5168 ret = ci_initial_switch_from_arb_f0_to_f1(rdev); ci_dpm_enable()
5173 ret = ci_init_smc_table(rdev); ci_dpm_enable()
5178 ret = ci_init_arb_table_index(rdev); ci_dpm_enable()
5184 ret = ci_populate_initial_mc_reg_table(rdev); ci_dpm_enable()
5190 ret = ci_populate_pm_base(rdev); ci_dpm_enable()
5195 ci_dpm_start_smc(rdev); ci_dpm_enable()
5196 ci_enable_vr_hot_gpio_interrupt(rdev); ci_dpm_enable()
5197 ret = ci_notify_smc_display_change(rdev, false); ci_dpm_enable()
5202 ci_enable_sclk_control(rdev, true); ci_dpm_enable()
5203 ret = ci_enable_ulv(rdev, true); ci_dpm_enable()
5208 ret = ci_enable_ds_master_switch(rdev, true); ci_dpm_enable()
5213 ret = ci_start_dpm(rdev); ci_dpm_enable()
5218 ret = ci_enable_didt(rdev, true); ci_dpm_enable()
5223 ret = ci_enable_smc_cac(rdev, true); ci_dpm_enable()
5228 ret = ci_enable_power_containment(rdev, true); ci_dpm_enable()
5234 ret = ci_power_control_set_level(rdev); ci_dpm_enable()
5240 ci_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true); ci_dpm_enable()
5242 ret = ci_enable_thermal_based_sclk_dpm(rdev, true); ci_dpm_enable()
5248 ci_thermal_start_thermal_controller(rdev); ci_dpm_enable()
5250 ci_update_current_ps(rdev, boot_ps); ci_dpm_enable()
5255 static int ci_set_temperature_range(struct radeon_device *rdev) ci_set_temperature_range() argument
5259 ret = ci_thermal_enable_alert(rdev, false); ci_set_temperature_range()
5262 ret = ci_thermal_set_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX); ci_set_temperature_range()
5265 ret = ci_thermal_enable_alert(rdev, true); ci_set_temperature_range()
5272 int ci_dpm_late_enable(struct radeon_device *rdev) ci_dpm_late_enable() argument
5276 ret = ci_set_temperature_range(rdev); ci_dpm_late_enable()
5280 ci_dpm_powergate_uvd(rdev, true); ci_dpm_late_enable()
5285 void ci_dpm_disable(struct radeon_device *rdev) ci_dpm_disable() argument
5287 struct ci_power_info *pi = ci_get_pi(rdev); ci_dpm_disable()
5288 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps; ci_dpm_disable()
5290 ci_dpm_powergate_uvd(rdev, false); ci_dpm_disable()
5292 if (!ci_is_smc_running(rdev)) ci_dpm_disable()
5295 ci_thermal_stop_thermal_controller(rdev); ci_dpm_disable()
5298 ci_enable_thermal_protection(rdev, false); ci_dpm_disable()
5299 ci_enable_power_containment(rdev, false); ci_dpm_disable()
5300 ci_enable_smc_cac(rdev, false); ci_dpm_disable()
5301 ci_enable_didt(rdev, false); ci_dpm_disable()
5302 ci_enable_spread_spectrum(rdev, false); ci_dpm_disable()
5303 ci_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false); ci_dpm_disable()
5304 ci_stop_dpm(rdev); ci_dpm_disable()
5305 ci_enable_ds_master_switch(rdev, false); ci_dpm_disable()
5306 ci_enable_ulv(rdev, false); ci_dpm_disable()
5307 ci_clear_vc(rdev); ci_dpm_disable()
5308 ci_reset_to_default(rdev); ci_dpm_disable()
5309 ci_dpm_stop_smc(rdev); ci_dpm_disable()
5310 ci_force_switch_to_arb_f0(rdev); ci_dpm_disable()
5311 ci_enable_thermal_based_sclk_dpm(rdev, false); ci_dpm_disable()
5313 ci_update_current_ps(rdev, boot_ps); ci_dpm_disable()
5316 int ci_dpm_set_power_state(struct radeon_device *rdev) ci_dpm_set_power_state() argument
5318 struct ci_power_info *pi = ci_get_pi(rdev); ci_dpm_set_power_state()
5323 ci_find_dpm_states_clocks_in_dpm_table(rdev, new_ps); ci_dpm_set_power_state()
5325 ci_request_link_speed_change_before_state_change(rdev, new_ps, old_ps); ci_dpm_set_power_state()
5326 ret = ci_freeze_sclk_mclk_dpm(rdev); ci_dpm_set_power_state()
5331 ret = ci_populate_and_upload_sclk_mclk_dpm_levels(rdev, new_ps); ci_dpm_set_power_state()
5336 ret = ci_generate_dpm_level_enable_mask(rdev, new_ps); ci_dpm_set_power_state()
5342 ret = ci_update_vce_dpm(rdev, new_ps, old_ps); ci_dpm_set_power_state()
5348 ret = ci_update_sclk_t(rdev); ci_dpm_set_power_state()
5354 ret = ci_update_and_upload_mc_reg_table(rdev); ci_dpm_set_power_state()
5360 ret = ci_program_memory_timing_parameters(rdev); ci_dpm_set_power_state()
5365 ret = ci_unfreeze_sclk_mclk_dpm(rdev); ci_dpm_set_power_state()
5370 ret = ci_upload_dpm_level_enable_mask(rdev); ci_dpm_set_power_state()
5376 ci_notify_link_speed_change_after_state_change(rdev, new_ps, old_ps); ci_dpm_set_power_state()
5382 void ci_dpm_reset_asic(struct radeon_device *rdev)
5384 ci_set_boot_state(rdev);
5388 void ci_dpm_display_configuration_changed(struct radeon_device *rdev) ci_dpm_display_configuration_changed() argument
5390 ci_program_display_gap(rdev); ci_dpm_display_configuration_changed()
5416 static void ci_parse_pplib_non_clock_info(struct radeon_device *rdev, ci_parse_pplib_non_clock_info() argument
5434 rdev->pm.dpm.boot_ps = rps; ci_parse_pplib_non_clock_info()
5436 rdev->pm.dpm.uvd_ps = rps; ci_parse_pplib_non_clock_info()
5439 static void ci_parse_pplib_clock_info(struct radeon_device *rdev, ci_parse_pplib_clock_info() argument
5443 struct ci_power_info *pi = ci_get_pi(rdev); ci_parse_pplib_clock_info()
5454 pl->pcie_gen = r600_get_pcie_gen_support(rdev, ci_parse_pplib_clock_info()
5458 pl->pcie_lane = r600_get_pcie_lane_support(rdev, ci_parse_pplib_clock_info()
5508 static int ci_parse_power_table(struct radeon_device *rdev) ci_parse_power_table() argument
5510 struct radeon_mode_info *mode_info = &rdev->mode_info; ci_parse_power_table()
5540 rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) * ci_parse_power_table()
5542 if (!rdev->pm.dpm.ps) ci_parse_power_table()
5551 if (!rdev->pm.power_state[i].clock_info) ci_parse_power_table()
5555 kfree(rdev->pm.dpm.ps); ci_parse_power_table()
5558 rdev->pm.dpm.ps[i].ps_priv = ps; ci_parse_power_table()
5559 ci_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], ci_parse_power_table()
5573 ci_parse_pplib_clock_info(rdev, ci_parse_power_table()
5574 &rdev->pm.dpm.ps[i], k, ci_parse_power_table()
5580 rdev->pm.dpm.num_ps = state_array->ucNumEntries; ci_parse_power_table()
5585 clock_array_index = rdev->pm.dpm.vce_states[i].clk_idx; ci_parse_power_table()
5592 rdev->pm.dpm.vce_states[i].sclk = sclk; ci_parse_power_table()
5593 rdev->pm.dpm.vce_states[i].mclk = mclk; ci_parse_power_table()
5599 static int ci_get_vbios_boot_values(struct radeon_device *rdev, ci_get_vbios_boot_values() argument
5602 struct radeon_mode_info *mode_info = &rdev->mode_info; ci_get_vbios_boot_values()
5616 boot_state->pcie_gen_bootup_value = ci_get_current_pcie_speed(rdev); ci_get_vbios_boot_values()
5617 boot_state->pcie_lane_bootup_value = ci_get_current_pcie_lane_number(rdev); ci_get_vbios_boot_values()
5626 void ci_dpm_fini(struct radeon_device *rdev) ci_dpm_fini() argument
5630 for (i = 0; i < rdev->pm.dpm.num_ps; i++) { ci_dpm_fini()
5631 kfree(rdev->pm.dpm.ps[i].ps_priv); ci_dpm_fini()
5633 kfree(rdev->pm.dpm.ps); ci_dpm_fini()
5634 kfree(rdev->pm.dpm.priv); ci_dpm_fini()
5635 kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries); ci_dpm_fini()
5636 r600_free_extended_power_table(rdev); ci_dpm_fini()
5639 int ci_dpm_init(struct radeon_device *rdev) ci_dpm_init() argument
5653 rdev->pm.dpm.priv = pi; ci_dpm_init()
5655 ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask); ci_dpm_init()
5672 ret = ci_get_vbios_boot_values(rdev, &pi->vbios_boot_state); ci_dpm_init()
5674 ci_dpm_fini(rdev); ci_dpm_init()
5678 ret = r600_get_platform_caps(rdev); ci_dpm_init()
5680 ci_dpm_fini(rdev); ci_dpm_init()
5684 ret = r600_parse_extended_power_table(rdev); ci_dpm_init()
5686 ci_dpm_fini(rdev); ci_dpm_init()
5690 ret = ci_parse_power_table(rdev); ci_dpm_init()
5692 ci_dpm_fini(rdev); ci_dpm_init()
5716 if ((rdev->pdev->device == 0x6658) && ci_dpm_init()
5717 (rdev->mc_fw->size == (BONAIRE_MC_UCODE_SIZE * 4))) { ci_dpm_init()
5728 ci_initialize_powertune_defaults(rdev); ci_dpm_init()
5737 ci_get_leakage_voltages(rdev); ci_dpm_init()
5738 ci_patch_dependency_tables_with_leakage(rdev); ci_dpm_init()
5739 ci_set_private_data_variables_based_on_pptable(rdev); ci_dpm_init()
5741 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries = ci_dpm_init()
5743 if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) { ci_dpm_init()
5744 ci_dpm_fini(rdev); ci_dpm_init()
5747 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4; ci_dpm_init()
5748 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0; ci_dpm_init()
5749 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0; ci_dpm_init()
5750 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000; ci_dpm_init()
5751 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720; ci_dpm_init()
5752 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000; ci_dpm_init()
5753 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810; ci_dpm_init()
5754 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000; ci_dpm_init()
5755 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900; ci_dpm_init()
5757 rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 4; ci_dpm_init()
5758 rdev->pm.dpm.dyn_state.sclk_mclk_delta = 15000; ci_dpm_init()
5759 rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200; ci_dpm_init()
5761 rdev->pm.dpm.dyn_state.valid_sclk_values.count = 0; ci_dpm_init()
5762 rdev->pm.dpm.dyn_state.valid_sclk_values.values = NULL; ci_dpm_init()
5763 rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0; ci_dpm_init()
5764 rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL; ci_dpm_init()
5766 if (rdev->family == CHIP_HAWAII) { ci_dpm_init()
5780 gpio = radeon_atombios_lookup_gpio(rdev, VDDC_VRHOT_GPIO_PINID); ci_dpm_init()
5783 rdev->pm.dpm.platform_caps |= ATOM_PP_PLATFORM_CAP_REGULATOR_HOT; ci_dpm_init()
5786 rdev->pm.dpm.platform_caps &= ~ATOM_PP_PLATFORM_CAP_REGULATOR_HOT; ci_dpm_init()
5789 gpio = radeon_atombios_lookup_gpio(rdev, PP_AC_DC_SWITCH_GPIO_PINID); ci_dpm_init()
5792 rdev->pm.dpm.platform_caps |= ATOM_PP_PLATFORM_CAP_HARDWAREDC; ci_dpm_init()
5795 rdev->pm.dpm.platform_caps &= ~ATOM_PP_PLATFORM_CAP_HARDWAREDC; ci_dpm_init()
5798 gpio = radeon_atombios_lookup_gpio(rdev, VDDC_PCC_GPIO_PINID); ci_dpm_init()
5830 if (radeon_atom_is_voltage_gpio(rdev, VOLTAGE_TYPE_VDDC, VOLTAGE_OBJ_GPIO_LUT)) ci_dpm_init()
5832 else if (radeon_atom_is_voltage_gpio(rdev, VOLTAGE_TYPE_VDDC, VOLTAGE_OBJ_SVID2)) ci_dpm_init()
5835 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_VDDCI_CONTROL) { ci_dpm_init()
5836 if (radeon_atom_is_voltage_gpio(rdev, VOLTAGE_TYPE_VDDCI, VOLTAGE_OBJ_GPIO_LUT)) ci_dpm_init()
5838 else if (radeon_atom_is_voltage_gpio(rdev, VOLTAGE_TYPE_VDDCI, VOLTAGE_OBJ_SVID2)) ci_dpm_init()
5841 rdev->pm.dpm.platform_caps &= ~ATOM_PP_PLATFORM_CAP_VDDCI_CONTROL; ci_dpm_init()
5844 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_MVDDCONTROL) { ci_dpm_init()
5845 if (radeon_atom_is_voltage_gpio(rdev, VOLTAGE_TYPE_MVDDC, VOLTAGE_OBJ_GPIO_LUT)) ci_dpm_init()
5847 else if (radeon_atom_is_voltage_gpio(rdev, VOLTAGE_TYPE_MVDDC, VOLTAGE_OBJ_SVID2)) ci_dpm_init()
5850 rdev->pm.dpm.platform_caps &= ~ATOM_PP_PLATFORM_CAP_MVDDCONTROL; ci_dpm_init()
5857 radeon_acpi_is_pcie_performance_request_supported(rdev); ci_dpm_init()
5862 if (atom_parse_data_header(rdev->mode_info.atom_context, index, &size, ci_dpm_init()
5873 if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE) ci_dpm_init()
5883 if ((rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk == 0) || ci_dpm_init()
5884 (rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk == 0)) ci_dpm_init()
5885 rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc = ci_dpm_init()
5886 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac; ci_dpm_init()
5893 void ci_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev, ci_dpm_debugfs_print_current_performance_level() argument
5896 struct ci_power_info *pi = ci_get_pi(rdev); ci_dpm_debugfs_print_current_performance_level()
5898 u32 sclk = ci_get_average_sclk_freq(rdev); ci_dpm_debugfs_print_current_performance_level()
5899 u32 mclk = ci_get_average_mclk_freq(rdev); ci_dpm_debugfs_print_current_performance_level()
5907 void ci_dpm_print_power_state(struct radeon_device *rdev, ci_dpm_print_power_state() argument
5922 r600_dpm_print_ps_status(rdev, rps); ci_dpm_print_power_state()
5925 u32 ci_dpm_get_current_sclk(struct radeon_device *rdev) ci_dpm_get_current_sclk() argument
5927 u32 sclk = ci_get_average_sclk_freq(rdev); ci_dpm_get_current_sclk()
5932 u32 ci_dpm_get_current_mclk(struct radeon_device *rdev) ci_dpm_get_current_mclk() argument
5934 u32 mclk = ci_get_average_mclk_freq(rdev); ci_dpm_get_current_mclk()
5939 u32 ci_dpm_get_sclk(struct radeon_device *rdev, bool low) ci_dpm_get_sclk() argument
5941 struct ci_power_info *pi = ci_get_pi(rdev); ci_dpm_get_sclk()
5950 u32 ci_dpm_get_mclk(struct radeon_device *rdev, bool low) ci_dpm_get_mclk() argument
5952 struct ci_power_info *pi = ci_get_pi(rdev); ci_dpm_get_mclk()
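A pattern visible throughout the ci_dpm.c hits is the downward table walk in ci_enable_uvd_dpm(), ci_enable_vce_dpm(), ci_enable_samu_dpm() and ci_enable_acp_dpm(): start at the highest entry of a clock/voltage dependency table and stop at the first one whose voltage fits under the vddc limit, which then becomes the boot level. A self-contained sketch of that walk with simplified stand-in types (the real tables live in rdev->pm.dpm.dyn_state and carry more fields):

#include <stdio.h>

struct clock_voltage_entry {
	unsigned int clk;	/* clock */
	unsigned short v;	/* voltage in mV */
};

struct clock_voltage_table {
	int count;
	struct clock_voltage_entry entries[8];
};

/* Walk the table from the top (highest clock) down and return the index of
 * the first entry whose voltage fits under the vddc limit, the same shape
 * as the loops in the ci_enable_*_dpm() hits above. */
static int pick_boot_level(const struct clock_voltage_table *table,
			   unsigned short max_vddc)
{
	int i;

	for (i = table->count - 1; i >= 0; i--) {
		if (table->entries[i].v <= max_vddc)
			return i;
	}
	return 0;	/* fall back to the lowest level */
}

int main(void)
{
	struct clock_voltage_table uvd = {
		.count = 3,
		.entries = {
			{ .clk = 30000, .v =  800 },
			{ .clk = 50000, .v =  950 },
			{ .clk = 70000, .v = 1100 },
		},
	};

	/* highest level that still fits under a 1000 mV limit -> 1 */
	printf("boot level: %d\n", pick_boot_level(&uvd, 1000));
	return 0;
}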
rs690.c
35 int rs690_mc_wait_for_idle(struct radeon_device *rdev) rs690_mc_wait_for_idle() argument
40 for (i = 0; i < rdev->usec_timeout; i++) { rs690_mc_wait_for_idle()
50 static void rs690_gpu_init(struct radeon_device *rdev) rs690_gpu_init() argument
53 r420_pipes_init(rdev); rs690_gpu_init()
54 if (rs690_mc_wait_for_idle(rdev)) { rs690_gpu_init()
65 void rs690_pm_info(struct radeon_device *rdev) rs690_pm_info() argument
73 if (atom_parse_data_header(rdev->mode_info.atom_context, index, NULL, rs690_pm_info()
75 info = (union igp_info *)(rdev->mode_info.atom_context->bios + data_offset); rs690_pm_info()
81 rdev->pm.igp_sideport_mclk.full = dfixed_const(le32_to_cpu(info->info.ulBootUpMemoryClock)); rs690_pm_info()
82 rdev->pm.igp_sideport_mclk.full = dfixed_div(rdev->pm.igp_sideport_mclk, tmp); rs690_pm_info()
84 rdev->pm.igp_system_mclk.full = dfixed_const(le16_to_cpu(info->info.usK8MemoryClock)); rs690_pm_info()
85 else if (rdev->clock.default_mclk) { rs690_pm_info()
86 rdev->pm.igp_system_mclk.full = dfixed_const(rdev->clock.default_mclk); rs690_pm_info()
87 rdev->pm.igp_system_mclk.full = dfixed_div(rdev->pm.igp_system_mclk, tmp); rs690_pm_info()
89 rdev->pm.igp_system_mclk.full = dfixed_const(400); rs690_pm_info()
90 rdev->pm.igp_ht_link_clk.full = dfixed_const(le16_to_cpu(info->info.usFSBClock)); rs690_pm_info()
91 rdev->pm.igp_ht_link_width.full = dfixed_const(info->info.ucHTLinkWidth); rs690_pm_info()
95 rdev->pm.igp_sideport_mclk.full = dfixed_const(le32_to_cpu(info->info_v2.ulBootUpSidePortClock)); rs690_pm_info()
96 rdev->pm.igp_sideport_mclk.full = dfixed_div(rdev->pm.igp_sideport_mclk, tmp); rs690_pm_info()
98 rdev->pm.igp_system_mclk.full = dfixed_const(le32_to_cpu(info->info_v2.ulBootUpUMAClock)); rs690_pm_info()
99 else if (rdev->clock.default_mclk) rs690_pm_info()
100 rdev->pm.igp_system_mclk.full = dfixed_const(rdev->clock.default_mclk); rs690_pm_info()
102 rdev->pm.igp_system_mclk.full = dfixed_const(66700); rs690_pm_info()
103 rdev->pm.igp_system_mclk.full = dfixed_div(rdev->pm.igp_system_mclk, tmp); rs690_pm_info()
104 rdev->pm.igp_ht_link_clk.full = dfixed_const(le32_to_cpu(info->info_v2.ulHTLinkFreq)); rs690_pm_info()
105 rdev->pm.igp_ht_link_clk.full = dfixed_div(rdev->pm.igp_ht_link_clk, tmp); rs690_pm_info()
106 rdev->pm.igp_ht_link_width.full = dfixed_const(le16_to_cpu(info->info_v2.usMinHTLinkWidth)); rs690_pm_info()
110 rdev->pm.igp_sideport_mclk.full = dfixed_const(200); rs690_pm_info()
111 rdev->pm.igp_system_mclk.full = dfixed_const(200); rs690_pm_info()
112 rdev->pm.igp_ht_link_clk.full = dfixed_const(1000); rs690_pm_info()
113 rdev->pm.igp_ht_link_width.full = dfixed_const(8); rs690_pm_info()
119 rdev->pm.igp_sideport_mclk.full = dfixed_const(200); rs690_pm_info()
120 rdev->pm.igp_system_mclk.full = dfixed_const(200); rs690_pm_info()
121 rdev->pm.igp_ht_link_clk.full = dfixed_const(1000); rs690_pm_info()
122 rdev->pm.igp_ht_link_width.full = dfixed_const(8); rs690_pm_info()
128 rdev->pm.k8_bandwidth.full = dfixed_mul(rdev->pm.igp_system_mclk, tmp); rs690_pm_info()
133 rdev->pm.ht_bandwidth.full = dfixed_mul(rdev->pm.igp_ht_link_clk, rs690_pm_info()
134 rdev->pm.igp_ht_link_width); rs690_pm_info()
135 rdev->pm.ht_bandwidth.full = dfixed_div(rdev->pm.ht_bandwidth, tmp); rs690_pm_info()
136 if (tmp.full < rdev->pm.max_bandwidth.full) { rs690_pm_info()
138 rdev->pm.max_bandwidth.full = tmp.full; rs690_pm_info()
144 rdev->pm.sideport_bandwidth.full = dfixed_mul(rdev->pm.igp_sideport_mclk, tmp); rs690_pm_info()
146 rdev->pm.sideport_bandwidth.full = dfixed_div(rdev->pm.sideport_bandwidth, tmp); rs690_pm_info()
149 static void rs690_mc_init(struct radeon_device *rdev) rs690_mc_init() argument
155 rs400_gart_adjust_size(rdev); rs690_mc_init()
156 rdev->mc.vram_is_ddr = true; rs690_mc_init()
157 rdev->mc.vram_width = 128; rs690_mc_init()
158 rdev->mc.real_vram_size = RREG32(RADEON_CONFIG_MEMSIZE); rs690_mc_init()
159 rdev->mc.mc_vram_size = rdev->mc.real_vram_size; rs690_mc_init()
160 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0); rs690_mc_init()
161 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0); rs690_mc_init()
162 rdev->mc.visible_vram_size = rdev->mc.aper_size; rs690_mc_init()
165 rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev); rs690_mc_init()
170 if (rdev->mc.igp_sideport_enabled && rs690_mc_init()
171 (rdev->mc.real_vram_size == (384 * 1024 * 1024))) { rs690_mc_init()
173 rdev->mc.real_vram_size -= 128 * 1024 * 1024; rs690_mc_init()
174 rdev->mc.mc_vram_size = rdev->mc.real_vram_size; rs690_mc_init()
178 rdev->fastfb_working = false; rs690_mc_init()
183 if (k8_addr + rdev->mc.visible_vram_size < 0x100000000ULL) rs690_mc_init()
189 if (rdev->mc.igp_sideport_enabled == false && radeon_fastfb == 1) { rs690_mc_init()
191 (unsigned long long)rdev->mc.aper_base, k8_addr); rs690_mc_init()
192 rdev->mc.aper_base = (resource_size_t)k8_addr; rs690_mc_init()
193 rdev->fastfb_working = true; rs690_mc_init()
197 rs690_pm_info(rdev); rs690_mc_init()
198 radeon_vram_location(rdev, &rdev->mc, base); rs690_mc_init()
199 rdev->mc.gtt_base_align = rdev->mc.gtt_size - 1; rs690_mc_init()
200 radeon_gtt_location(rdev, &rdev->mc); rs690_mc_init()
201 radeon_update_bandwidth_info(rdev); rs690_mc_init()
204 void rs690_line_buffer_adjust(struct radeon_device *rdev, rs690_line_buffer_adjust() argument
252 rdev->mode_info.crtcs[0]->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode1->crtc_hdisplay); rs690_line_buffer_adjust()
255 rdev->mode_info.crtcs[1]->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode2->crtc_hdisplay); rs690_line_buffer_adjust()
271 static void rs690_crtc_bandwidth_compute(struct radeon_device *rdev, rs690_crtc_bandwidth_compute() argument
289 if (((rdev->family == CHIP_RS780) || (rdev->family == CHIP_RS880)) && rs690_crtc_bandwidth_compute()
290 (rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) rs690_crtc_bandwidth_compute()
291 selected_sclk = radeon_dpm_get_sclk(rdev, low); rs690_crtc_bandwidth_compute()
293 selected_sclk = rdev->pm.current_sclk; rs690_crtc_bandwidth_compute()
302 core_bandwidth.full = dfixed_div(rdev->pm.sclk, a); rs690_crtc_bandwidth_compute()
365 if (rdev->mc.igp_sideport_enabled) { rs690_crtc_bandwidth_compute()
366 if (max_bandwidth.full > rdev->pm.sideport_bandwidth.full && rs690_crtc_bandwidth_compute()
367 rdev->pm.sideport_bandwidth.full) rs690_crtc_bandwidth_compute()
368 max_bandwidth = rdev->pm.sideport_bandwidth; rs690_crtc_bandwidth_compute()
371 b.full = dfixed_div(rdev->pm.igp_sideport_mclk, a); rs690_crtc_bandwidth_compute()
375 if (max_bandwidth.full > rdev->pm.k8_bandwidth.full && rs690_crtc_bandwidth_compute()
376 rdev->pm.k8_bandwidth.full) rs690_crtc_bandwidth_compute()
377 max_bandwidth = rdev->pm.k8_bandwidth; rs690_crtc_bandwidth_compute()
378 if (max_bandwidth.full > rdev->pm.ht_bandwidth.full && rs690_crtc_bandwidth_compute()
379 rdev->pm.ht_bandwidth.full) rs690_crtc_bandwidth_compute()
380 max_bandwidth = rdev->pm.ht_bandwidth; rs690_crtc_bandwidth_compute()
459 static void rs690_compute_mode_priority(struct radeon_device *rdev, rs690_compute_mode_priority() argument
522 if (rdev->disp_priority == 2) { rs690_compute_mode_priority()
551 if (rdev->disp_priority == 2) rs690_compute_mode_priority()
578 if (rdev->disp_priority == 2) rs690_compute_mode_priority()
583 void rs690_bandwidth_update(struct radeon_device *rdev) rs690_bandwidth_update() argument
593 if (!rdev->mode_info.mode_config_initialized) rs690_bandwidth_update()
596 radeon_update_display_priority(rdev); rs690_bandwidth_update()
598 if (rdev->mode_info.crtcs[0]->base.enabled) rs690_bandwidth_update()
599 mode0 = &rdev->mode_info.crtcs[0]->base.mode; rs690_bandwidth_update()
600 if (rdev->mode_info.crtcs[1]->base.enabled) rs690_bandwidth_update()
601 mode1 = &rdev->mode_info.crtcs[1]->base.mode; rs690_bandwidth_update()
607 if ((rdev->disp_priority == 2) && rs690_bandwidth_update()
608 ((rdev->family == CHIP_RS690) || (rdev->family == CHIP_RS740))) { rs690_bandwidth_update()
618 rs690_line_buffer_adjust(rdev, mode0, mode1); rs690_bandwidth_update()
620 if ((rdev->family == CHIP_RS690) || (rdev->family == CHIP_RS740)) rs690_bandwidth_update()
622 if ((rdev->family == CHIP_RS780) || (rdev->family == CHIP_RS880)) rs690_bandwidth_update()
625 rs690_crtc_bandwidth_compute(rdev, rdev->mode_info.crtcs[0], &wm0_high, false); rs690_bandwidth_update()
626 rs690_crtc_bandwidth_compute(rdev, rdev->mode_info.crtcs[1], &wm1_high, false); rs690_bandwidth_update()
628 rs690_crtc_bandwidth_compute(rdev, rdev->mode_info.crtcs[0], &wm0_low, true); rs690_bandwidth_update()
629 rs690_crtc_bandwidth_compute(rdev, rdev->mode_info.crtcs[1], &wm1_low, true); rs690_bandwidth_update()
635 rs690_compute_mode_priority(rdev, rs690_bandwidth_update()
639 rs690_compute_mode_priority(rdev, rs690_bandwidth_update()
650 uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg) rs690_mc_rreg() argument
655 spin_lock_irqsave(&rdev->mc_idx_lock, flags); rs690_mc_rreg()
659 spin_unlock_irqrestore(&rdev->mc_idx_lock, flags); rs690_mc_rreg()
663 void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) rs690_mc_wreg() argument
667 spin_lock_irqsave(&rdev->mc_idx_lock, flags); rs690_mc_wreg()
672 spin_unlock_irqrestore(&rdev->mc_idx_lock, flags); rs690_mc_wreg()
675 static void rs690_mc_program(struct radeon_device *rdev) rs690_mc_program() argument
680 rv515_mc_stop(rdev, &save); rs690_mc_program()
683 if (rs690_mc_wait_for_idle(rdev)) rs690_mc_program()
684 dev_warn(rdev->dev, "Wait MC idle timeout before updating MC.\n"); rs690_mc_program()
687 S_000100_MC_FB_START(rdev->mc.vram_start >> 16) | rs690_mc_program()
688 S_000100_MC_FB_TOP(rdev->mc.vram_end >> 16)); rs690_mc_program()
690 S_000134_HDP_FB_START(rdev->mc.vram_start >> 16)); rs690_mc_program()
692 rv515_mc_resume(rdev, &save); rs690_mc_program()
695 static int rs690_startup(struct radeon_device *rdev) rs690_startup() argument
699 rs690_mc_program(rdev); rs690_startup()
701 rv515_clock_startup(rdev); rs690_startup()
703 rs690_gpu_init(rdev); rs690_startup()
706 r = rs400_gart_enable(rdev); rs690_startup()
711 r = radeon_wb_init(rdev); rs690_startup()
715 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); rs690_startup()
717 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); rs690_startup()
722 if (!rdev->irq.installed) { rs690_startup()
723 r = radeon_irq_kms_init(rdev); rs690_startup()
728 rs600_irq_set(rdev); rs690_startup()
729 rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL); rs690_startup()
731 r = r100_cp_init(rdev, 1024 * 1024); rs690_startup()
733 dev_err(rdev->dev, "failed initializing CP (%d).\n", r); rs690_startup()
737 r = radeon_ib_pool_init(rdev); rs690_startup()
739 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); rs690_startup()
743 r = radeon_audio_init(rdev); rs690_startup()
745 dev_err(rdev->dev, "failed initializing audio\n"); rs690_startup()
752 int rs690_resume(struct radeon_device *rdev) rs690_resume() argument
757 rs400_gart_disable(rdev); rs690_resume()
759 rv515_clock_startup(rdev); rs690_resume()
761 if (radeon_asic_reset(rdev)) { rs690_resume()
762 dev_warn(rdev->dev, "GPU reset failed ! (0xE40=0x%08X, 0x7C0=0x%08X)\n", rs690_resume()
767 atom_asic_init(rdev->mode_info.atom_context); rs690_resume()
769 rv515_clock_startup(rdev); rs690_resume()
771 radeon_surface_init(rdev); rs690_resume()
773 rdev->accel_working = true; rs690_resume()
774 r = rs690_startup(rdev); rs690_resume()
776 rdev->accel_working = false; rs690_resume()
781 int rs690_suspend(struct radeon_device *rdev) rs690_suspend() argument
783 radeon_pm_suspend(rdev); rs690_suspend()
784 radeon_audio_fini(rdev); rs690_suspend()
785 r100_cp_disable(rdev); rs690_suspend()
786 radeon_wb_disable(rdev); rs690_suspend()
787 rs600_irq_disable(rdev); rs690_suspend()
788 rs400_gart_disable(rdev); rs690_suspend()
792 void rs690_fini(struct radeon_device *rdev) rs690_fini() argument
794 radeon_pm_fini(rdev); rs690_fini()
795 radeon_audio_fini(rdev); rs690_fini()
796 r100_cp_fini(rdev); rs690_fini()
797 radeon_wb_fini(rdev); rs690_fini()
798 radeon_ib_pool_fini(rdev); rs690_fini()
799 radeon_gem_fini(rdev); rs690_fini()
800 rs400_gart_fini(rdev); rs690_fini()
801 radeon_irq_kms_fini(rdev); rs690_fini()
802 radeon_fence_driver_fini(rdev); rs690_fini()
803 radeon_bo_fini(rdev); rs690_fini()
804 radeon_atombios_fini(rdev); rs690_fini()
805 kfree(rdev->bios); rs690_fini()
806 rdev->bios = NULL; rs690_fini()
809 int rs690_init(struct radeon_device *rdev) rs690_init() argument
814 rv515_vga_render_disable(rdev); rs690_init()
816 radeon_scratch_init(rdev); rs690_init()
818 radeon_surface_init(rdev); rs690_init()
820 r100_restore_sanity(rdev); rs690_init()
823 if (!radeon_get_bios(rdev)) { rs690_init()
824 if (ASIC_IS_AVIVO(rdev)) rs690_init()
827 if (rdev->is_atom_bios) { rs690_init()
828 r = radeon_atombios_init(rdev); rs690_init()
832 dev_err(rdev->dev, "Expecting atombios for RV515 GPU\n"); rs690_init()
836 if (radeon_asic_reset(rdev)) { rs690_init()
837 dev_warn(rdev->dev, rs690_init()
843 if (radeon_boot_test_post_card(rdev) == false) rs690_init()
847 radeon_get_clock_info(rdev->ddev); rs690_init()
849 rs690_mc_init(rdev); rs690_init()
850 rv515_debugfs(rdev); rs690_init()
852 r = radeon_fence_driver_init(rdev); rs690_init()
856 r = radeon_bo_init(rdev); rs690_init()
859 r = rs400_gart_init(rdev); rs690_init()
862 rs600_set_safe_registers(rdev); rs690_init()
865 radeon_pm_init(rdev); rs690_init()
867 rdev->accel_working = true; rs690_init()
868 r = rs690_startup(rdev); rs690_init()
871 dev_err(rdev->dev, "Disabling GPU acceleration\n"); rs690_init()
872 r100_cp_fini(rdev); rs690_init()
873 radeon_wb_fini(rdev); rs690_init()
874 radeon_ib_pool_fini(rdev); rs690_init()
875 rs400_gart_fini(rdev); rs690_init()
876 radeon_irq_kms_fini(rdev); rs690_init()
877 rdev->accel_working = false; rs690_init()
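The rs690_pm_info() and rs690_crtc_bandwidth_compute() hits lean on the drm fixed-point helpers (dfixed_const/dfixed_mul/dfixed_div) to do bandwidth math without floating point. Below is a sketch of what those helpers compute, assuming the fixed20_12 layout (12 fractional bits) from include/drm/drm_fixed.h; the sample numbers and the final scale factor are made up for illustration.

#include <stdint.h>
#include <stdio.h>

typedef struct { uint32_t full; } fixed20_12;

static fixed20_12 dfx_const(uint32_t a)			/* ~ dfixed_const() */
{
	return (fixed20_12){ a << 12 };
}

static fixed20_12 dfx_mul(fixed20_12 a, fixed20_12 b)	/* ~ dfixed_mul() */
{
	return (fixed20_12){ (uint32_t)(((uint64_t)a.full * b.full) >> 12) };
}

static fixed20_12 dfx_div(fixed20_12 a, fixed20_12 b)	/* ~ dfixed_div() */
{
	return (fixed20_12){ (uint32_t)(((uint64_t)a.full << 12) / b.full) };
}

int main(void)
{
	/* Shape of the ht_bandwidth computation in rs690_pm_info():
	 * multiply link clock by link width, then divide by a scale
	 * factor (tmp); its actual value is not visible in the hits. */
	fixed20_12 clk = dfx_const(1000);	/* HT link clock */
	fixed20_12 width = dfx_const(8);	/* HT link width */
	fixed20_12 tmp = dfx_const(8);		/* assumed scale factor */
	fixed20_12 bw = dfx_div(dfx_mul(clk, width), tmp);

	printf("bandwidth = %u.%03u\n", bw.full >> 12,
	       (bw.full & 0xfff) * 1000 / 4096);
	return 0;
}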
r600_dpm.c
141 void r600_dpm_print_ps_status(struct radeon_device *rdev, r600_dpm_print_ps_status() argument
145 if (rps == rdev->pm.dpm.current_ps) r600_dpm_print_ps_status()
147 if (rps == rdev->pm.dpm.requested_ps) r600_dpm_print_ps_status()
149 if (rps == rdev->pm.dpm.boot_ps) r600_dpm_print_ps_status()
154 u32 r600_dpm_get_vblank_time(struct radeon_device *rdev) r600_dpm_get_vblank_time() argument
156 struct drm_device *dev = rdev->ddev; r600_dpm_get_vblank_time()
162 if (rdev->num_crtc && rdev->mode_info.mode_config_initialized) { r600_dpm_get_vblank_time()
180 u32 r600_dpm_get_vrefresh(struct radeon_device *rdev) r600_dpm_get_vrefresh() argument
182 struct drm_device *dev = rdev->ddev; r600_dpm_get_vrefresh()
187 if (rdev->num_crtc && rdev->mode_info.mode_config_initialized) { r600_dpm_get_vrefresh()
239 void r600_gfx_clockgating_enable(struct radeon_device *rdev, bool enable) r600_gfx_clockgating_enable() argument
250 for (i = 0; i < rdev->usec_timeout; i++) { r600_gfx_clockgating_enable()
263 void r600_dynamicpm_enable(struct radeon_device *rdev, bool enable) r600_dynamicpm_enable() argument
271 void r600_enable_thermal_protection(struct radeon_device *rdev, bool enable) r600_enable_thermal_protection() argument
279 void r600_enable_acpi_pm(struct radeon_device *rdev) r600_enable_acpi_pm() argument
284 void r600_enable_dynamic_pcie_gen2(struct radeon_device *rdev, bool enable) r600_enable_dynamic_pcie_gen2() argument
292 bool r600_dynamicpm_enabled(struct radeon_device *rdev) r600_dynamicpm_enabled() argument
300 void r600_enable_sclk_control(struct radeon_device *rdev, bool enable) r600_enable_sclk_control() argument
308 void r600_enable_mclk_control(struct radeon_device *rdev, bool enable) r600_enable_mclk_control() argument
316 void r600_enable_spll_bypass(struct radeon_device *rdev, bool enable) r600_enable_spll_bypass() argument
324 void r600_wait_for_spll_change(struct radeon_device *rdev) r600_wait_for_spll_change() argument
328 for (i = 0; i < rdev->usec_timeout; i++) { r600_wait_for_spll_change()
335 void r600_set_bsp(struct radeon_device *rdev, u32 u, u32 p) r600_set_bsp() argument
340 void r600_set_at(struct radeon_device *rdev, r600_set_at() argument
348 void r600_set_tc(struct radeon_device *rdev, r600_set_tc() argument
354 void r600_select_td(struct radeon_device *rdev, r600_select_td() argument
367 void r600_set_vrc(struct radeon_device *rdev, u32 vrv) r600_set_vrc() argument
372 void r600_set_tpu(struct radeon_device *rdev, u32 u) r600_set_tpu() argument
377 void r600_set_tpc(struct radeon_device *rdev, u32 c) r600_set_tpc() argument
382 void r600_set_sstu(struct radeon_device *rdev, u32 u) r600_set_sstu() argument
387 void r600_set_sst(struct radeon_device *rdev, u32 t) r600_set_sst() argument
392 void r600_set_git(struct radeon_device *rdev, u32 t) r600_set_git() argument
397 void r600_set_fctu(struct radeon_device *rdev, u32 u) r600_set_fctu() argument
402 void r600_set_fct(struct radeon_device *rdev, u32 t) r600_set_fct() argument
407 void r600_set_ctxcgtt3d_rphc(struct radeon_device *rdev, u32 p) r600_set_ctxcgtt3d_rphc() argument
412 void r600_set_ctxcgtt3d_rsdc(struct radeon_device *rdev, u32 s) r600_set_ctxcgtt3d_rsdc() argument
417 void r600_set_vddc3d_oorsu(struct radeon_device *rdev, u32 u) r600_set_vddc3d_oorsu() argument
422 void r600_set_vddc3d_oorphc(struct radeon_device *rdev, u32 p) r600_set_vddc3d_oorphc() argument
427 void r600_set_vddc3d_oorsdc(struct radeon_device *rdev, u32 s) r600_set_vddc3d_oorsdc() argument
432 void r600_set_mpll_lock_time(struct radeon_device *rdev, u32 lock_time) r600_set_mpll_lock_time() argument
437 void r600_set_mpll_reset_time(struct radeon_device *rdev, u32 reset_time) r600_set_mpll_reset_time() argument
442 void r600_engine_clock_entry_enable(struct radeon_device *rdev, r600_engine_clock_entry_enable() argument
453 void r600_engine_clock_entry_enable_pulse_skipping(struct radeon_device *rdev, r600_engine_clock_entry_enable_pulse_skipping() argument
464 void r600_engine_clock_entry_enable_post_divider(struct radeon_device *rdev, r600_engine_clock_entry_enable_post_divider() argument
475 void r600_engine_clock_entry_set_post_divider(struct radeon_device *rdev, r600_engine_clock_entry_set_post_divider() argument
482 void r600_engine_clock_entry_set_reference_divider(struct radeon_device *rdev, r600_engine_clock_entry_set_reference_divider() argument
489 void r600_engine_clock_entry_set_feedback_divider(struct radeon_device *rdev, r600_engine_clock_entry_set_feedback_divider() argument
496 void r600_engine_clock_entry_set_step_time(struct radeon_device *rdev, r600_engine_clock_entry_set_step_time() argument
503 void r600_vid_rt_set_ssu(struct radeon_device *rdev, u32 u) r600_vid_rt_set_ssu() argument
508 void r600_vid_rt_set_vru(struct radeon_device *rdev, u32 u) r600_vid_rt_set_vru() argument
513 void r600_vid_rt_set_vrt(struct radeon_device *rdev, u32 rt) r600_vid_rt_set_vrt() argument
518 void r600_voltage_control_enable_pins(struct radeon_device *rdev, r600_voltage_control_enable_pins() argument
526 void r600_voltage_control_program_voltages(struct radeon_device *rdev, r600_voltage_control_program_voltages() argument
540 void r600_voltage_control_deactivate_static_control(struct radeon_device *rdev, r600_voltage_control_deactivate_static_control() argument
558 void r600_power_level_enable(struct radeon_device *rdev, r600_power_level_enable() argument
571 void r600_power_level_set_voltage_index(struct radeon_device *rdev, r600_power_level_set_voltage_index() argument
580 void r600_power_level_set_mem_clock_index(struct radeon_device *rdev, r600_power_level_set_mem_clock_index() argument
589 void r600_power_level_set_eng_clock_index(struct radeon_device *rdev, r600_power_level_set_eng_clock_index() argument
598 void r600_power_level_set_watermark_id(struct radeon_device *rdev, r600_power_level_set_watermark_id() argument
610 void r600_power_level_set_pcie_gen2(struct radeon_device *rdev, r600_power_level_set_pcie_gen2() argument
621 enum r600_power_level r600_power_level_get_current_index(struct radeon_device *rdev) r600_power_level_get_current_index() argument
630 enum r600_power_level r600_power_level_get_target_index(struct radeon_device *rdev) r600_power_level_get_target_index() argument
639 void r600_power_level_set_enter_index(struct radeon_device *rdev, r600_power_level_set_enter_index() argument
646 void r600_wait_for_power_level_unequal(struct radeon_device *rdev, r600_wait_for_power_level_unequal() argument
651 for (i = 0; i < rdev->usec_timeout; i++) { r600_wait_for_power_level_unequal()
652 if (r600_power_level_get_target_index(rdev) != index) r600_wait_for_power_level_unequal()
657 for (i = 0; i < rdev->usec_timeout; i++) { r600_wait_for_power_level_unequal()
658 if (r600_power_level_get_current_index(rdev) != index) r600_wait_for_power_level_unequal()
664 void r600_wait_for_power_level(struct radeon_device *rdev, r600_wait_for_power_level() argument
669 for (i = 0; i < rdev->usec_timeout; i++) { r600_wait_for_power_level()
670 if (r600_power_level_get_target_index(rdev) == index) r600_wait_for_power_level()
675 for (i = 0; i < rdev->usec_timeout; i++) { r600_wait_for_power_level()
676 if (r600_power_level_get_current_index(rdev) == index) r600_wait_for_power_level()
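r600_wait_for_power_level() and its _unequal twin poll the hardware at most rdev->usec_timeout times with a short delay per pass, and simply give up on timeout rather than failing hard. A user-space sketch of that bounded-poll idiom (read_level() is a hypothetical stand-in for r600_power_level_get_current_index()):

#include <stdbool.h>
#include <stdio.h>
#include <unistd.h>

static int hw_level = 0;
/* Stand-in for the hardware register read; ramps to level 3. */
static int read_level(void) { if (hw_level < 3) hw_level++; return hw_level; }

static bool wait_for_level(int index, unsigned int usec_timeout)
{
	unsigned int i;

	for (i = 0; i < usec_timeout; i++) {
		if (read_level() == index)
			return true;
		usleep(1);	/* the kernel code uses udelay(1) */
	}
	return false;	/* timeout is best-effort; callers just proceed */
}

int main(void)
{
	printf("reached level 3: %s\n", wait_for_level(3, 100) ? "yes" : "no");
	return 0;
}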
682 void r600_start_dpm(struct radeon_device *rdev) r600_start_dpm() argument
684 r600_enable_sclk_control(rdev, false); r600_start_dpm()
685 r600_enable_mclk_control(rdev, false); r600_start_dpm()
687 r600_dynamicpm_enable(rdev, true); r600_start_dpm()
689 radeon_wait_for_vblank(rdev, 0); r600_start_dpm()
690 radeon_wait_for_vblank(rdev, 1); r600_start_dpm()
692 r600_enable_spll_bypass(rdev, true); r600_start_dpm()
693 r600_wait_for_spll_change(rdev); r600_start_dpm()
694 r600_enable_spll_bypass(rdev, false); r600_start_dpm()
695 r600_wait_for_spll_change(rdev); r600_start_dpm()
697 r600_enable_spll_bypass(rdev, true); r600_start_dpm()
698 r600_wait_for_spll_change(rdev); r600_start_dpm()
699 r600_enable_spll_bypass(rdev, false); r600_start_dpm()
700 r600_wait_for_spll_change(rdev); r600_start_dpm()
702 r600_enable_sclk_control(rdev, true); r600_start_dpm()
703 r600_enable_mclk_control(rdev, true); r600_start_dpm()
706 void r600_stop_dpm(struct radeon_device *rdev) r600_stop_dpm() argument
708 r600_dynamicpm_enable(rdev, false); r600_stop_dpm()
711 int r600_dpm_pre_set_power_state(struct radeon_device *rdev) r600_dpm_pre_set_power_state() argument
716 void r600_dpm_post_set_power_state(struct radeon_device *rdev) r600_dpm_post_set_power_state() argument
736 static int r600_set_thermal_temperature_range(struct radeon_device *rdev, r600_set_thermal_temperature_range() argument
755 rdev->pm.dpm.thermal.min_temp = low_temp; r600_set_thermal_temperature_range()
756 rdev->pm.dpm.thermal.max_temp = high_temp; r600_set_thermal_temperature_range()
784 int r600_dpm_late_enable(struct radeon_device *rdev) r600_dpm_late_enable() argument
788 if (rdev->irq.installed && r600_dpm_late_enable()
789 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) { r600_dpm_late_enable()
790 ret = r600_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX); r600_dpm_late_enable()
793 rdev->irq.dpm_thermal = true; r600_dpm_late_enable()
794 radeon_irq_set(rdev); r600_dpm_late_enable()
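r600_dpm_late_enable() programs the thermal trip range before arming the DPM thermal interrupt, and only when an internal sensor is present, so the handler can never fire against an unprogrammed range. A sketch of that order of operations, with stand-in values for R600_TEMP_RANGE_MIN/MAX (the driver works in millidegrees):

#include <stdbool.h>
#include <stdio.h>

/* Sketch of the ordering in r600_dpm_late_enable(): write the trip
 * range first, arm the interrupt second.  90000/120000 are stand-ins
 * for R600_TEMP_RANGE_MIN/MAX. */
struct thermal { int min_temp, max_temp; bool irq_armed; };

static int set_range(struct thermal *t, int lo, int hi)
{
	if (lo > hi)
		return -1;
	t->min_temp = lo;
	t->max_temp = hi;
	return 0;
}

int main(void)
{
	struct thermal t = { 0 };

	if (set_range(&t, 90000, 120000) == 0)
		t.irq_armed = true;	/* mirrors rdev->irq.dpm_thermal = true */
	printf("armed=%d range=[%d,%d]\n", t.irq_armed, t.min_temp, t.max_temp);
	return 0;
}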
842 int r600_get_platform_caps(struct radeon_device *rdev) r600_get_platform_caps() argument
844 struct radeon_mode_info *mode_info = &rdev->mode_info; r600_get_platform_caps()
855 rdev->pm.dpm.platform_caps = le32_to_cpu(power_info->pplib.ulPlatformCaps); r600_get_platform_caps()
856 rdev->pm.dpm.backbias_response_time = le16_to_cpu(power_info->pplib.usBackbiasTime); r600_get_platform_caps()
857 rdev->pm.dpm.voltage_response_time = le16_to_cpu(power_info->pplib.usVoltageTime); r600_get_platform_caps()
870 int r600_parse_extended_power_table(struct radeon_device *rdev) r600_parse_extended_power_table() argument
872 struct radeon_mode_info *mode_info = &rdev->mode_info; r600_parse_extended_power_table()
892 rdev->pm.dpm.fan.t_hyst = fan_info->fan.ucTHyst; r600_parse_extended_power_table()
893 rdev->pm.dpm.fan.t_min = le16_to_cpu(fan_info->fan.usTMin); r600_parse_extended_power_table()
894 rdev->pm.dpm.fan.t_med = le16_to_cpu(fan_info->fan.usTMed); r600_parse_extended_power_table()
895 rdev->pm.dpm.fan.t_high = le16_to_cpu(fan_info->fan.usTHigh); r600_parse_extended_power_table()
896 rdev->pm.dpm.fan.pwm_min = le16_to_cpu(fan_info->fan.usPWMMin); r600_parse_extended_power_table()
897 rdev->pm.dpm.fan.pwm_med = le16_to_cpu(fan_info->fan.usPWMMed); r600_parse_extended_power_table()
898 rdev->pm.dpm.fan.pwm_high = le16_to_cpu(fan_info->fan.usPWMHigh); r600_parse_extended_power_table()
900 rdev->pm.dpm.fan.t_max = le16_to_cpu(fan_info->fan2.usTMax); r600_parse_extended_power_table()
902 rdev->pm.dpm.fan.t_max = 10900; r600_parse_extended_power_table()
903 rdev->pm.dpm.fan.cycle_delay = 100000; r600_parse_extended_power_table()
905 rdev->pm.dpm.fan.control_mode = fan_info->fan3.ucFanControlMode; r600_parse_extended_power_table()
906 rdev->pm.dpm.fan.default_max_fan_pwm = r600_parse_extended_power_table()
908 rdev->pm.dpm.fan.default_fan_output_sensitivity = 4836; r600_parse_extended_power_table()
909 rdev->pm.dpm.fan.fan_output_sensitivity = r600_parse_extended_power_table()
912 rdev->pm.dpm.fan.ucode_fan_control = true; r600_parse_extended_power_table()
923 ret = r600_parse_clk_voltage_dep_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk, r600_parse_extended_power_table()
932 ret = r600_parse_clk_voltage_dep_table(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk, r600_parse_extended_power_table()
935 kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries); r600_parse_extended_power_table()
943 ret = r600_parse_clk_voltage_dep_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk, r600_parse_extended_power_table()
946 kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries); r600_parse_extended_power_table()
947 kfree(rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk.entries); r600_parse_extended_power_table()
955 ret = r600_parse_clk_voltage_dep_table(&rdev->pm.dpm.dyn_state.mvdd_dependency_on_mclk, r600_parse_extended_power_table()
958 kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries); r600_parse_extended_power_table()
959 kfree(rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk.entries); r600_parse_extended_power_table()
960 kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk.entries); r600_parse_extended_power_table()
970 rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk = r600_parse_extended_power_table()
973 rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk = r600_parse_extended_power_table()
976 rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc = r600_parse_extended_power_table()
978 rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddci = r600_parse_extended_power_table()
989 rdev->pm.dpm.dyn_state.phase_shedding_limits_table.entries = r600_parse_extended_power_table()
993 if (!rdev->pm.dpm.dyn_state.phase_shedding_limits_table.entries) { r600_parse_extended_power_table()
994 r600_free_extended_power_table(rdev); r600_parse_extended_power_table()
1000 rdev->pm.dpm.dyn_state.phase_shedding_limits_table.entries[i].sclk = r600_parse_extended_power_table()
1002 rdev->pm.dpm.dyn_state.phase_shedding_limits_table.entries[i].mclk = r600_parse_extended_power_table()
1004 rdev->pm.dpm.dyn_state.phase_shedding_limits_table.entries[i].voltage = r600_parse_extended_power_table()
1009 rdev->pm.dpm.dyn_state.phase_shedding_limits_table.count = r600_parse_extended_power_table()
1017 rdev->pm.dpm.tdp_limit = le32_to_cpu(power_info->pplib5.ulTDPLimit); r600_parse_extended_power_table()
1018 rdev->pm.dpm.near_tdp_limit = le32_to_cpu(power_info->pplib5.ulNearTDPLimit); r600_parse_extended_power_table()
1019 rdev->pm.dpm.near_tdp_limit_adjusted = rdev->pm.dpm.near_tdp_limit; r600_parse_extended_power_table()
1020 rdev->pm.dpm.tdp_od_limit = le16_to_cpu(power_info->pplib5.usTDPODLimit); r600_parse_extended_power_table()
1021 if (rdev->pm.dpm.tdp_od_limit) r600_parse_extended_power_table()
1022 rdev->pm.dpm.power_control = true; r600_parse_extended_power_table()
1024 rdev->pm.dpm.power_control = false; r600_parse_extended_power_table()
1025 rdev->pm.dpm.tdp_adjustment = 0; r600_parse_extended_power_table()
1026 rdev->pm.dpm.sq_ramping_threshold = le32_to_cpu(power_info->pplib5.ulSQRampingThreshold); r600_parse_extended_power_table()
1027 rdev->pm.dpm.cac_leakage = le32_to_cpu(power_info->pplib5.ulCACLeakage); r600_parse_extended_power_table()
1028 rdev->pm.dpm.load_line_slope = le16_to_cpu(power_info->pplib5.usLoadLineSlope); r600_parse_extended_power_table()
1036 rdev->pm.dpm.dyn_state.cac_leakage_table.entries = kzalloc(size, GFP_KERNEL); r600_parse_extended_power_table()
1037 if (!rdev->pm.dpm.dyn_state.cac_leakage_table.entries) { r600_parse_extended_power_table()
1038 r600_free_extended_power_table(rdev); r600_parse_extended_power_table()
1043 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_EVV) { r600_parse_extended_power_table()
1044 rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc1 = r600_parse_extended_power_table()
1046 rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc2 = r600_parse_extended_power_table()
1048 rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc3 = r600_parse_extended_power_table()
1051 rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc = r600_parse_extended_power_table()
1053 rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].leakage = r600_parse_extended_power_table()
1059 rdev->pm.dpm.dyn_state.cac_leakage_table.count = cac_table->ucNumEntries; r600_parse_extended_power_table()
1090 rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries = r600_parse_extended_power_table()
1092 if (!rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries) { r600_parse_extended_power_table()
1093 r600_free_extended_power_table(rdev); r600_parse_extended_power_table()
1096 rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.count = r600_parse_extended_power_table()
1104 rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].evclk = r600_parse_extended_power_table()
1106 rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].ecclk = r600_parse_extended_power_table()
1108 rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].v = r600_parse_extended_power_table()
1119 rdev->pm.dpm.vce_states[i].evclk = r600_parse_extended_power_table()
1121 rdev->pm.dpm.vce_states[i].ecclk = r600_parse_extended_power_table()
1123 rdev->pm.dpm.vce_states[i].clk_idx = r600_parse_extended_power_table()
1125 rdev->pm.dpm.vce_states[i].pstate = r600_parse_extended_power_table()
1144 rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries = r600_parse_extended_power_table()
1146 if (!rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries) { r600_parse_extended_power_table()
1147 r600_free_extended_power_table(rdev); r600_parse_extended_power_table()
1150 rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.count = r600_parse_extended_power_table()
1157 rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].vclk = r600_parse_extended_power_table()
1159 rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].dclk = r600_parse_extended_power_table()
1161 rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].v = r600_parse_extended_power_table()
1176 rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries = r600_parse_extended_power_table()
1178 if (!rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries) { r600_parse_extended_power_table()
1179 r600_free_extended_power_table(rdev); r600_parse_extended_power_table()
1182 rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.count = r600_parse_extended_power_table()
1186 rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries[i].clk = r600_parse_extended_power_table()
1188 rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries[i].v = r600_parse_extended_power_table()
1199 rdev->pm.dpm.dyn_state.ppm_table = r600_parse_extended_power_table()
1201 if (!rdev->pm.dpm.dyn_state.ppm_table) { r600_parse_extended_power_table()
1202 r600_free_extended_power_table(rdev); r600_parse_extended_power_table()
1205 rdev->pm.dpm.dyn_state.ppm_table->ppm_design = ppm->ucPpmDesign; r600_parse_extended_power_table()
1206 rdev->pm.dpm.dyn_state.ppm_table->cpu_core_number = r600_parse_extended_power_table()
1208 rdev->pm.dpm.dyn_state.ppm_table->platform_tdp = r600_parse_extended_power_table()
1210 rdev->pm.dpm.dyn_state.ppm_table->small_ac_platform_tdp = r600_parse_extended_power_table()
1212 rdev->pm.dpm.dyn_state.ppm_table->platform_tdc = r600_parse_extended_power_table()
1214 rdev->pm.dpm.dyn_state.ppm_table->small_ac_platform_tdc = r600_parse_extended_power_table()
1216 rdev->pm.dpm.dyn_state.ppm_table->apu_tdp = r600_parse_extended_power_table()
1218 rdev->pm.dpm.dyn_state.ppm_table->dgpu_tdp = r600_parse_extended_power_table()
1220 rdev->pm.dpm.dyn_state.ppm_table->dgpu_ulv_power = r600_parse_extended_power_table()
1222 rdev->pm.dpm.dyn_state.ppm_table->tj_max = r600_parse_extended_power_table()
1234 rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries = r600_parse_extended_power_table()
1236 if (!rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries) { r600_parse_extended_power_table()
1237 r600_free_extended_power_table(rdev); r600_parse_extended_power_table()
1240 rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.count = r600_parse_extended_power_table()
1244 rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries[i].clk = r600_parse_extended_power_table()
1246 rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries[i].v = r600_parse_extended_power_table()
1257 rdev->pm.dpm.dyn_state.cac_tdp_table = r600_parse_extended_power_table()
1259 if (!rdev->pm.dpm.dyn_state.cac_tdp_table) { r600_parse_extended_power_table()
1260 r600_free_extended_power_table(rdev); r600_parse_extended_power_table()
1267 rdev->pm.dpm.dyn_state.cac_tdp_table->maximum_power_delivery_limit = r600_parse_extended_power_table()
1274 rdev->pm.dpm.dyn_state.cac_tdp_table->maximum_power_delivery_limit = 255; r600_parse_extended_power_table()
1277 rdev->pm.dpm.dyn_state.cac_tdp_table->tdp = le16_to_cpu(pt->usTDP); r600_parse_extended_power_table()
1278 rdev->pm.dpm.dyn_state.cac_tdp_table->configurable_tdp = r600_parse_extended_power_table()
1280 rdev->pm.dpm.dyn_state.cac_tdp_table->tdc = le16_to_cpu(pt->usTDC); r600_parse_extended_power_table()
1281 rdev->pm.dpm.dyn_state.cac_tdp_table->battery_power_limit = r600_parse_extended_power_table()
1283 rdev->pm.dpm.dyn_state.cac_tdp_table->small_power_limit = r600_parse_extended_power_table()
1285 rdev->pm.dpm.dyn_state.cac_tdp_table->low_cac_leakage = r600_parse_extended_power_table()
1287 rdev->pm.dpm.dyn_state.cac_tdp_table->high_cac_leakage = r600_parse_extended_power_table()
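r600_parse_extended_power_table() walks ATOM tables whose fields are stored little-endian, converting each with le16_to_cpu()/le32_to_cpu() into allocated dyn_state tables, and on any allocation failure frees every table parsed so far. A compact user-space sketch of that decode-and-own pattern (the struct layout here is hypothetical):

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct dep_entry { uint16_t clk; uint16_t v; };
struct dep_table { size_t count; struct dep_entry *entries; };

/* Portable little-endian 16-bit read, the moral equivalent of le16_to_cpu(). */
static uint16_t le16(const uint8_t *p) { return (uint16_t)(p[0] | (p[1] << 8)); }

static int parse_dep_table(struct dep_table *t, const uint8_t *raw, size_t n)
{
	size_t i;

	t->entries = calloc(n, sizeof(*t->entries));
	if (!t->entries)
		return -1;	/* caller frees earlier tables, as the driver does */
	for (i = 0; i < n; i++) {
		t->entries[i].clk = le16(raw + 4 * i);
		t->entries[i].v   = le16(raw + 4 * i + 2);
	}
	t->count = n;
	return 0;
}

int main(void)
{
	const uint8_t raw[] = { 0x10, 0x27, 0x20, 0x03 };	/* 10000 @ 800 */
	struct dep_table sclk = { 0 };

	if (parse_dep_table(&sclk, raw, 1))
		return 1;
	printf("clk=%u v=%u\n", sclk.entries[0].clk, sclk.entries[0].v);
	free(sclk.entries);
	return 0;
}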
1295 void r600_free_extended_power_table(struct radeon_device *rdev) r600_free_extended_power_table() argument
1297 struct radeon_dpm_dynamic_state *dyn_state = &rdev->pm.dpm.dyn_state; r600_free_extended_power_table()
1313 enum radeon_pcie_gen r600_get_pcie_gen_support(struct radeon_device *rdev, r600_get_pcie_gen_support() argument
1336 u16 r600_get_pcie_lane_support(struct radeon_device *rdev, r600_get_pcie_lane_support() argument
H A Dradeon_bios.c45 static bool igp_read_bios_from_vram(struct radeon_device *rdev) igp_read_bios_from_vram() argument
51 if (!(rdev->flags & RADEON_IS_IGP)) igp_read_bios_from_vram()
52 if (!radeon_card_posted(rdev)) igp_read_bios_from_vram()
55 rdev->bios = NULL; igp_read_bios_from_vram()
56 vram_base = pci_resource_start(rdev->pdev, 0); igp_read_bios_from_vram()
66 rdev->bios = kmalloc(size, GFP_KERNEL); igp_read_bios_from_vram()
67 if (rdev->bios == NULL) { igp_read_bios_from_vram()
71 memcpy_fromio(rdev->bios, bios, size); igp_read_bios_from_vram()
76 static bool radeon_read_bios(struct radeon_device *rdev) radeon_read_bios() argument
81 rdev->bios = NULL; radeon_read_bios()
83 bios = pci_map_rom(rdev->pdev, &size); radeon_read_bios()
92 pci_unmap_rom(rdev->pdev, bios); radeon_read_bios()
95 rdev->bios = kzalloc(size, GFP_KERNEL); radeon_read_bios()
96 if (rdev->bios == NULL) { radeon_read_bios()
97 pci_unmap_rom(rdev->pdev, bios); radeon_read_bios()
100 memcpy_fromio(rdev->bios, bios, size); radeon_read_bios()
101 pci_unmap_rom(rdev->pdev, bios); radeon_read_bios()
105 static bool radeon_read_platform_bios(struct radeon_device *rdev) radeon_read_platform_bios() argument
110 rdev->bios = NULL; radeon_read_platform_bios()
112 bios = pci_platform_rom(rdev->pdev, &size); radeon_read_platform_bios()
120 rdev->bios = kmemdup(bios, size, GFP_KERNEL); radeon_read_platform_bios()
121 if (rdev->bios == NULL) { radeon_read_platform_bios()
176 static bool radeon_atrm_get_bios(struct radeon_device *rdev) radeon_atrm_get_bios() argument
187 if (rdev->flags & RADEON_IS_IGP) radeon_atrm_get_bios()
219 rdev->bios = kmalloc(size, GFP_KERNEL); radeon_atrm_get_bios()
220 if (!rdev->bios) { radeon_atrm_get_bios()
227 rdev->bios, radeon_atrm_get_bios()
234 if (i == 0 || rdev->bios[0] != 0x55 || rdev->bios[1] != 0xaa) { radeon_atrm_get_bios()
235 kfree(rdev->bios); radeon_atrm_get_bios()
241 static inline bool radeon_atrm_get_bios(struct radeon_device *rdev) radeon_atrm_get_bios() argument
247 static bool ni_read_disabled_bios(struct radeon_device *rdev) ni_read_disabled_bios() argument
264 if (!ASIC_IS_NODCE(rdev)) { ni_read_disabled_bios()
277 r = radeon_read_bios(rdev); ni_read_disabled_bios()
281 if (!ASIC_IS_NODCE(rdev)) { ni_read_disabled_bios()
290 static bool r700_read_disabled_bios(struct radeon_device *rdev) r700_read_disabled_bios() argument
323 if (rdev->family == CHIP_RV730) { r700_read_disabled_bios()
339 r = radeon_read_bios(rdev); r700_read_disabled_bios()
342 if (rdev->family == CHIP_RV730) { r700_read_disabled_bios()
359 static bool r600_read_disabled_bios(struct radeon_device *rdev) r600_read_disabled_bios() argument
418 r = radeon_read_bios(rdev); r600_read_disabled_bios()
436 static bool avivo_read_disabled_bios(struct radeon_device *rdev) avivo_read_disabled_bios() argument
482 r = radeon_read_bios(rdev); avivo_read_disabled_bios()
497 static bool legacy_read_disabled_bios(struct radeon_device *rdev) legacy_read_disabled_bios() argument
510 if (rdev->flags & RADEON_IS_PCIE) legacy_read_disabled_bios()
519 if (rdev->ddev->pdev->device == PCI_DEVICE_ID_ATI_RADEON_QY) { legacy_read_disabled_bios()
523 if (!(rdev->flags & RADEON_SINGLE_CRTC)) { legacy_read_disabled_bios()
535 if (rdev->flags & RADEON_IS_PCIE) legacy_read_disabled_bios()
545 if (!(rdev->flags & RADEON_SINGLE_CRTC)) { legacy_read_disabled_bios()
556 if (rdev->ddev->pdev->device == PCI_DEVICE_ID_ATI_RADEON_QY) { legacy_read_disabled_bios()
560 r = radeon_read_bios(rdev); legacy_read_disabled_bios()
565 if (rdev->flags & RADEON_IS_PCIE) legacy_read_disabled_bios()
570 if (!(rdev->flags & RADEON_SINGLE_CRTC)) { legacy_read_disabled_bios()
574 if (rdev->ddev->pdev->device == PCI_DEVICE_ID_ATI_RADEON_QY) { legacy_read_disabled_bios()
580 static bool radeon_read_disabled_bios(struct radeon_device *rdev) radeon_read_disabled_bios() argument
582 if (rdev->flags & RADEON_IS_IGP) radeon_read_disabled_bios()
583 return igp_read_bios_from_vram(rdev); radeon_read_disabled_bios()
584 else if (rdev->family >= CHIP_BARTS) radeon_read_disabled_bios()
585 return ni_read_disabled_bios(rdev); radeon_read_disabled_bios()
586 else if (rdev->family >= CHIP_RV770) radeon_read_disabled_bios()
587 return r700_read_disabled_bios(rdev); radeon_read_disabled_bios()
588 else if (rdev->family >= CHIP_R600) radeon_read_disabled_bios()
589 return r600_read_disabled_bios(rdev); radeon_read_disabled_bios()
590 else if (rdev->family >= CHIP_RS600) radeon_read_disabled_bios()
591 return avivo_read_disabled_bios(rdev); radeon_read_disabled_bios()
593 return legacy_read_disabled_bios(rdev); radeon_read_disabled_bios()
597 static bool radeon_acpi_vfct_bios(struct radeon_device *rdev) radeon_acpi_vfct_bios() argument
625 if (vhdr->PCIBus != rdev->pdev->bus->number || radeon_acpi_vfct_bios()
626 vhdr->PCIDevice != PCI_SLOT(rdev->pdev->devfn) || radeon_acpi_vfct_bios()
627 vhdr->PCIFunction != PCI_FUNC(rdev->pdev->devfn) || radeon_acpi_vfct_bios()
628 vhdr->VendorID != rdev->pdev->vendor || radeon_acpi_vfct_bios()
629 vhdr->DeviceID != rdev->pdev->device) { radeon_acpi_vfct_bios()
639 rdev->bios = kmemdup(&vbios->VbiosContent, vhdr->ImageLength, GFP_KERNEL); radeon_acpi_vfct_bios()
640 ret = !!rdev->bios; radeon_acpi_vfct_bios()
646 static inline bool radeon_acpi_vfct_bios(struct radeon_device *rdev) radeon_acpi_vfct_bios() argument
652 bool radeon_get_bios(struct radeon_device *rdev) radeon_get_bios() argument
657 r = radeon_atrm_get_bios(rdev); radeon_get_bios()
659 r = radeon_acpi_vfct_bios(rdev); radeon_get_bios()
661 r = igp_read_bios_from_vram(rdev); radeon_get_bios()
663 r = radeon_read_bios(rdev); radeon_get_bios()
665 r = radeon_read_disabled_bios(rdev); radeon_get_bios()
667 r = radeon_read_platform_bios(rdev); radeon_get_bios()
668 if (r == false || rdev->bios == NULL) { radeon_get_bios()
670 rdev->bios = NULL; radeon_get_bios()
673 if (rdev->bios[0] != 0x55 || rdev->bios[1] != 0xaa) { radeon_get_bios()
674 printk("BIOS signature incorrect %x %x\n", rdev->bios[0], rdev->bios[1]); radeon_get_bios()
684 rdev->bios_header_start = RBIOS16(0x48); radeon_get_bios()
685 if (!rdev->bios_header_start) { radeon_get_bios()
688 tmp = rdev->bios_header_start + 4; radeon_get_bios()
689 if (!memcmp(rdev->bios + tmp, "ATOM", 4) || radeon_get_bios()
690 !memcmp(rdev->bios + tmp, "MOTA", 4)) { radeon_get_bios()
691 rdev->is_atom_bios = true; radeon_get_bios()
693 rdev->is_atom_bios = false; radeon_get_bios()
696 DRM_DEBUG("%sBIOS detected\n", rdev->is_atom_bios ? "ATOM" : "COM"); radeon_get_bios()
699 kfree(rdev->bios); radeon_get_bios()
700 rdev->bios = NULL; radeon_get_bios()
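radeon_get_bios() tries ATRM, then ACPI VFCT, then VRAM, then the PCI ROM, then the disabled-BIOS paths, then the platform ROM, and whichever source wins must still pass the PCI option-ROM signature check (0x55 0xAA in the first two bytes) or the image is discarded. A sketch of that final validation:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Sketch of the check at the end of radeon_get_bios(): regardless of
 * which fallback supplied the image, it must carry the PCI option-ROM
 * signature. */
static bool bios_signature_ok(const uint8_t *bios, size_t len)
{
	return len >= 2 && bios[0] == 0x55 && bios[1] == 0xaa;
}

int main(void)
{
	const uint8_t good[] = { 0x55, 0xaa, 0x7f };
	const uint8_t bad[]  = { 0x00, 0x00 };

	printf("good: %d, bad: %d\n",
	       bios_signature_ok(good, sizeof(good)),
	       bios_signature_ok(bad, sizeof(bad)));
	return 0;
}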
H A Drv515.c38 static int rv515_debugfs_pipes_info_init(struct radeon_device *rdev);
39 static int rv515_debugfs_ga_info_init(struct radeon_device *rdev);
40 static void rv515_gpu_init(struct radeon_device *rdev);
41 int rv515_mc_wait_for_idle(struct radeon_device *rdev);
49 void rv515_debugfs(struct radeon_device *rdev) rv515_debugfs() argument
51 if (r100_debugfs_rbbm_init(rdev)) { rv515_debugfs()
54 if (rv515_debugfs_pipes_info_init(rdev)) { rv515_debugfs()
57 if (rv515_debugfs_ga_info_init(rdev)) { rv515_debugfs()
62 void rv515_ring_start(struct radeon_device *rdev, struct radeon_ring *ring) rv515_ring_start() argument
66 r = radeon_ring_lock(rdev, ring, 64); rv515_ring_start()
85 radeon_ring_write(ring, (1 << rdev->num_gb_pipes) - 1); rv515_ring_start()
127 radeon_ring_unlock_commit(rdev, ring, false); rv515_ring_start()
130 int rv515_mc_wait_for_idle(struct radeon_device *rdev) rv515_mc_wait_for_idle() argument
135 for (i = 0; i < rdev->usec_timeout; i++) { rv515_mc_wait_for_idle()
146 void rv515_vga_render_disable(struct radeon_device *rdev) rv515_vga_render_disable() argument
152 static void rv515_gpu_init(struct radeon_device *rdev) rv515_gpu_init() argument
156 if (r100_gui_wait_for_idle(rdev)) { rv515_gpu_init()
160 rv515_vga_render_disable(rdev); rv515_gpu_init()
161 r420_pipes_init(rdev); rv515_gpu_init()
168 if (r100_gui_wait_for_idle(rdev)) { rv515_gpu_init()
172 if (rv515_mc_wait_for_idle(rdev)) { rv515_gpu_init()
178 static void rv515_vram_get_type(struct radeon_device *rdev) rv515_vram_get_type() argument
182 rdev->mc.vram_width = 128; rv515_vram_get_type()
183 rdev->mc.vram_is_ddr = true; rv515_vram_get_type()
187 rdev->mc.vram_width = 64; rv515_vram_get_type()
190 rdev->mc.vram_width = 128; rv515_vram_get_type()
193 rdev->mc.vram_width = 128; rv515_vram_get_type()
198 static void rv515_mc_init(struct radeon_device *rdev) rv515_mc_init() argument
201 rv515_vram_get_type(rdev); rv515_mc_init()
202 r100_vram_init_sizes(rdev); rv515_mc_init()
203 radeon_vram_location(rdev, &rdev->mc, 0); rv515_mc_init()
204 rdev->mc.gtt_base_align = 0; rv515_mc_init()
205 if (!(rdev->flags & RADEON_IS_AGP)) rv515_mc_init()
206 radeon_gtt_location(rdev, &rdev->mc); rv515_mc_init()
207 radeon_update_bandwidth_info(rdev); rv515_mc_init()
210 uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg) rv515_mc_rreg() argument
215 spin_lock_irqsave(&rdev->mc_idx_lock, flags); rv515_mc_rreg()
219 spin_unlock_irqrestore(&rdev->mc_idx_lock, flags); rv515_mc_rreg()
224 void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) rv515_mc_wreg() argument
228 spin_lock_irqsave(&rdev->mc_idx_lock, flags); rv515_mc_wreg()
232 spin_unlock_irqrestore(&rdev->mc_idx_lock, flags); rv515_mc_wreg()
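rv515_mc_rreg()/rv515_mc_wreg() reach the MC block through an index/data register pair, so the index write and the data access must happen atomically under mc_idx_lock (spin_lock_irqsave in the driver). A user-space sketch with a mutex standing in for the spinlock:

#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

/* Sketch of the indexed MC access behind rv515_mc_rreg()/rv515_mc_wreg():
 * write the address to an index register, then touch a data register,
 * both under one lock so concurrent callers cannot interleave. */
static uint32_t mc_index, mc_space[16];
static pthread_mutex_t mc_lock = PTHREAD_MUTEX_INITIALIZER;

static uint32_t mc_rreg(uint32_t reg)
{
	uint32_t v;

	pthread_mutex_lock(&mc_lock);
	mc_index = reg;			/* WREG32(MC_IND_INDEX, reg) */
	v = mc_space[mc_index];		/* RREG32(MC_IND_DATA) */
	pthread_mutex_unlock(&mc_lock);
	return v;
}

static void mc_wreg(uint32_t reg, uint32_t v)
{
	pthread_mutex_lock(&mc_lock);
	mc_index = reg;
	mc_space[mc_index] = v;
	pthread_mutex_unlock(&mc_lock);
}

int main(void)
{
	mc_wreg(3, 0xdeadbeef);
	printf("MC[3] = 0x%08x\n", mc_rreg(3));
	return 0;
}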
240 struct radeon_device *rdev = dev->dev_private; rv515_debugfs_pipes_info() local
258 struct radeon_device *rdev = dev->dev_private; rv515_debugfs_ga_info() local
263 radeon_asic_reset(rdev); rv515_debugfs_ga_info()
278 static int rv515_debugfs_pipes_info_init(struct radeon_device *rdev) rv515_debugfs_pipes_info_init() argument
281 return radeon_debugfs_add_files(rdev, rv515_pipes_info_list, 1); rv515_debugfs_pipes_info_init()
287 static int rv515_debugfs_ga_info_init(struct radeon_device *rdev) rv515_debugfs_ga_info_init() argument
290 return radeon_debugfs_add_files(rdev, rv515_ga_info_list, 1); rv515_debugfs_ga_info_init()
296 void rv515_mc_stop(struct radeon_device *rdev, struct rv515_mc_save *save) rv515_mc_stop() argument
307 for (i = 0; i < rdev->num_crtc; i++) { rv515_mc_stop()
313 radeon_wait_for_vblank(rdev, i); rv515_mc_stop()
320 frame_count = radeon_get_vblank_counter(rdev, i); rv515_mc_stop()
321 for (j = 0; j < rdev->usec_timeout; j++) { rv515_mc_stop()
322 if (radeon_get_vblank_counter(rdev, i) != frame_count) rv515_mc_stop()
340 radeon_mc_wait_for_idle(rdev); rv515_mc_stop()
342 if (rdev->family >= CHIP_R600) { rv515_mc_stop()
343 if (rdev->family >= CHIP_RV770) rv515_mc_stop()
352 if (rdev->family >= CHIP_RV770) rv515_mc_stop()
362 for (i = 0; i < rdev->num_crtc; i++) { rv515_mc_stop()
378 void rv515_mc_resume(struct radeon_device *rdev, struct rv515_mc_save *save) rv515_mc_resume() argument
384 for (i = 0; i < rdev->num_crtc; i++) { rv515_mc_resume()
385 if (rdev->family >= CHIP_RV770) { rv515_mc_resume()
388 upper_32_bits(rdev->mc.vram_start)); rv515_mc_resume()
390 upper_32_bits(rdev->mc.vram_start)); rv515_mc_resume()
393 upper_32_bits(rdev->mc.vram_start)); rv515_mc_resume()
395 upper_32_bits(rdev->mc.vram_start)); rv515_mc_resume()
399 (u32)rdev->mc.vram_start); rv515_mc_resume()
401 (u32)rdev->mc.vram_start); rv515_mc_resume()
403 WREG32(R_000310_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start); rv515_mc_resume()
406 for (i = 0; i < rdev->num_crtc; i++) { rv515_mc_resume()
424 for (j = 0; j < rdev->usec_timeout; j++) { rv515_mc_resume()
433 if (rdev->family >= CHIP_R600) { rv515_mc_resume()
435 if (rdev->family >= CHIP_RV770) rv515_mc_resume()
440 if (rdev->family >= CHIP_RV770) rv515_mc_resume()
448 for (i = 0; i < rdev->num_crtc; i++) { rv515_mc_resume()
454 frame_count = radeon_get_vblank_counter(rdev, i); rv515_mc_resume()
455 for (j = 0; j < rdev->usec_timeout; j++) { rv515_mc_resume()
456 if (radeon_get_vblank_counter(rdev, i) != frame_count) rv515_mc_resume()
468 static void rv515_mc_program(struct radeon_device *rdev) rv515_mc_program() argument
473 rv515_mc_stop(rdev, &save); rv515_mc_program()
476 if (rv515_mc_wait_for_idle(rdev)) rv515_mc_program()
477 dev_warn(rdev->dev, "Wait MC idle timeout before updating MC.\n"); rv515_mc_program()
479 WREG32(R_0000F8_CONFIG_MEMSIZE, rdev->mc.real_vram_size); rv515_mc_program()
482 S_000001_MC_FB_START(rdev->mc.vram_start >> 16) | rv515_mc_program()
483 S_000001_MC_FB_TOP(rdev->mc.vram_end >> 16)); rv515_mc_program()
485 S_000134_HDP_FB_START(rdev->mc.vram_start >> 16)); rv515_mc_program()
486 if (rdev->flags & RADEON_IS_AGP) { rv515_mc_program()
488 S_000002_MC_AGP_START(rdev->mc.gtt_start >> 16) | rv515_mc_program()
489 S_000002_MC_AGP_TOP(rdev->mc.gtt_end >> 16)); rv515_mc_program()
490 WREG32_MC(R_000003_MC_AGP_BASE, lower_32_bits(rdev->mc.agp_base)); rv515_mc_program()
492 S_000004_AGP_BASE_ADDR_2(upper_32_bits(rdev->mc.agp_base))); rv515_mc_program()
499 rv515_mc_resume(rdev, &save); rv515_mc_program()
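rv515_mc_program() stops the MC, waits for idle, then writes the framebuffer range as two packed 16-bit fields, start and top, each the physical address shifted right by 16. A sketch of that packing (the field positions are illustrative, not the real MC_FB_LOCATION layout):

#include <stdint.h>
#include <stdio.h>

/* Sketch of the S_000001_MC_FB_START/TOP packing in rv515_mc_program();
 * bit positions are assumptions for illustration. */
#define MC_FB_START(x)	((uint32_t)((x) & 0xffff))
#define MC_FB_TOP(x)	((uint32_t)(((x) & 0xffff) << 16))

int main(void)
{
	uint64_t vram_start = 0x00000000, vram_end = 0x0fffffff;
	uint32_t fb_location = MC_FB_START(vram_start >> 16) |
			       MC_FB_TOP(vram_end >> 16);

	printf("MC_FB_LOCATION = 0x%08x\n", (unsigned)fb_location);
	return 0;
}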
502 void rv515_clock_startup(struct radeon_device *rdev) rv515_clock_startup() argument
505 radeon_atom_set_clock_gating(rdev, 1); rv515_clock_startup()
515 static int rv515_startup(struct radeon_device *rdev) rv515_startup() argument
519 rv515_mc_program(rdev); rv515_startup()
521 rv515_clock_startup(rdev); rv515_startup()
523 rv515_gpu_init(rdev); rv515_startup()
526 if (rdev->flags & RADEON_IS_PCIE) { rv515_startup()
527 r = rv370_pcie_gart_enable(rdev); rv515_startup()
533 r = radeon_wb_init(rdev); rv515_startup()
537 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); rv515_startup()
539 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); rv515_startup()
544 if (!rdev->irq.installed) { rv515_startup()
545 r = radeon_irq_kms_init(rdev); rv515_startup()
550 rs600_irq_set(rdev); rv515_startup()
551 rdev->config.r300.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL); rv515_startup()
553 r = r100_cp_init(rdev, 1024 * 1024); rv515_startup()
555 dev_err(rdev->dev, "failed initializing CP (%d).\n", r); rv515_startup()
559 r = radeon_ib_pool_init(rdev); rv515_startup()
561 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); rv515_startup()
568 int rv515_resume(struct radeon_device *rdev) rv515_resume() argument
573 if (rdev->flags & RADEON_IS_PCIE) rv515_resume()
574 rv370_pcie_gart_disable(rdev); rv515_resume()
576 rv515_clock_startup(rdev); rv515_resume()
578 if (radeon_asic_reset(rdev)) { rv515_resume()
579 dev_warn(rdev->dev, "GPU reset failed ! (0xE40=0x%08X, 0x7C0=0x%08X)\n", rv515_resume()
584 atom_asic_init(rdev->mode_info.atom_context); rv515_resume()
586 rv515_clock_startup(rdev); rv515_resume()
588 radeon_surface_init(rdev); rv515_resume()
590 rdev->accel_working = true; rv515_resume()
591 r = rv515_startup(rdev); rv515_resume()
593 rdev->accel_working = false; rv515_resume()
598 int rv515_suspend(struct radeon_device *rdev) rv515_suspend() argument
600 radeon_pm_suspend(rdev); rv515_suspend()
601 r100_cp_disable(rdev); rv515_suspend()
602 radeon_wb_disable(rdev); rv515_suspend()
603 rs600_irq_disable(rdev); rv515_suspend()
604 if (rdev->flags & RADEON_IS_PCIE) rv515_suspend()
605 rv370_pcie_gart_disable(rdev); rv515_suspend()
609 void rv515_set_safe_registers(struct radeon_device *rdev) rv515_set_safe_registers() argument
611 rdev->config.r300.reg_safe_bm = rv515_reg_safe_bm; rv515_set_safe_registers()
612 rdev->config.r300.reg_safe_bm_size = ARRAY_SIZE(rv515_reg_safe_bm); rv515_set_safe_registers()
615 void rv515_fini(struct radeon_device *rdev) rv515_fini() argument
617 radeon_pm_fini(rdev); rv515_fini()
618 r100_cp_fini(rdev); rv515_fini()
619 radeon_wb_fini(rdev); rv515_fini()
620 radeon_ib_pool_fini(rdev); rv515_fini()
621 radeon_gem_fini(rdev); rv515_fini()
622 rv370_pcie_gart_fini(rdev); rv515_fini()
623 radeon_agp_fini(rdev); rv515_fini()
624 radeon_irq_kms_fini(rdev); rv515_fini()
625 radeon_fence_driver_fini(rdev); rv515_fini()
626 radeon_bo_fini(rdev); rv515_fini()
627 radeon_atombios_fini(rdev); rv515_fini()
628 kfree(rdev->bios); rv515_fini()
629 rdev->bios = NULL; rv515_fini()
632 int rv515_init(struct radeon_device *rdev) rv515_init() argument
637 radeon_scratch_init(rdev); rv515_init()
639 radeon_surface_init(rdev); rv515_init()
642 r100_restore_sanity(rdev); rv515_init()
644 if (!radeon_get_bios(rdev)) { rv515_init()
645 if (ASIC_IS_AVIVO(rdev)) rv515_init()
648 if (rdev->is_atom_bios) { rv515_init()
649 r = radeon_atombios_init(rdev); rv515_init()
653 dev_err(rdev->dev, "Expecting atombios for RV515 GPU\n"); rv515_init()
657 if (radeon_asic_reset(rdev)) { rv515_init()
658 dev_warn(rdev->dev, rv515_init()
664 if (radeon_boot_test_post_card(rdev) == false) rv515_init()
667 radeon_get_clock_info(rdev->ddev); rv515_init()
669 if (rdev->flags & RADEON_IS_AGP) { rv515_init()
670 r = radeon_agp_init(rdev); rv515_init()
672 radeon_agp_disable(rdev); rv515_init()
676 rv515_mc_init(rdev); rv515_init()
677 rv515_debugfs(rdev); rv515_init()
679 r = radeon_fence_driver_init(rdev); rv515_init()
683 r = radeon_bo_init(rdev); rv515_init()
686 r = rv370_pcie_gart_init(rdev); rv515_init()
689 rv515_set_safe_registers(rdev); rv515_init()
692 radeon_pm_init(rdev); rv515_init()
694 rdev->accel_working = true; rv515_init()
695 r = rv515_startup(rdev); rv515_init()
698 dev_err(rdev->dev, "Disabling GPU acceleration\n"); rv515_init()
699 r100_cp_fini(rdev); rv515_init()
700 radeon_wb_fini(rdev); rv515_init()
701 radeon_ib_pool_fini(rdev); rv515_init()
702 radeon_irq_kms_fini(rdev); rv515_init()
703 rv370_pcie_gart_fini(rdev); rv515_init()
704 radeon_agp_fini(rdev); rv515_init()
705 rdev->accel_working = false; rv515_init()
710 void atom_rv515_force_tv_scaler(struct radeon_device *rdev, struct radeon_crtc *crtc) atom_rv515_force_tv_scaler() argument
953 static void rv515_crtc_bandwidth_compute(struct radeon_device *rdev, rv515_crtc_bandwidth_compute() argument
972 if ((rdev->family >= CHIP_RV610) && rv515_crtc_bandwidth_compute()
973 (rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) rv515_crtc_bandwidth_compute()
974 selected_sclk = radeon_dpm_get_sclk(rdev, low); rv515_crtc_bandwidth_compute()
976 selected_sclk = rdev->pm.current_sclk; rv515_crtc_bandwidth_compute()
1109 static void rv515_compute_mode_priority(struct radeon_device *rdev, rv515_compute_mode_priority() argument
1172 if (rdev->disp_priority == 2) { rv515_compute_mode_priority()
1201 if (rdev->disp_priority == 2) rv515_compute_mode_priority()
1228 if (rdev->disp_priority == 2) rv515_compute_mode_priority()
1233 void rv515_bandwidth_avivo_update(struct radeon_device *rdev) rv515_bandwidth_avivo_update() argument
1243 if (rdev->mode_info.crtcs[0]->base.enabled) rv515_bandwidth_avivo_update()
1244 mode0 = &rdev->mode_info.crtcs[0]->base.mode; rv515_bandwidth_avivo_update()
1245 if (rdev->mode_info.crtcs[1]->base.enabled) rv515_bandwidth_avivo_update()
1246 mode1 = &rdev->mode_info.crtcs[1]->base.mode; rv515_bandwidth_avivo_update()
1247 rs690_line_buffer_adjust(rdev, mode0, mode1); rv515_bandwidth_avivo_update()
1249 rv515_crtc_bandwidth_compute(rdev, rdev->mode_info.crtcs[0], &wm0_high, false); rv515_bandwidth_avivo_update()
1250 rv515_crtc_bandwidth_compute(rdev, rdev->mode_info.crtcs[1], &wm1_high, false); rv515_bandwidth_avivo_update()
1252 rv515_crtc_bandwidth_compute(rdev, rdev->mode_info.crtcs[0], &wm0_low, false); rv515_bandwidth_avivo_update()
1253 rv515_crtc_bandwidth_compute(rdev, rdev->mode_info.crtcs[1], &wm1_low, false); rv515_bandwidth_avivo_update()
1259 rv515_compute_mode_priority(rdev, rv515_bandwidth_avivo_update()
1263 rv515_compute_mode_priority(rdev, rv515_bandwidth_avivo_update()
1274 void rv515_bandwidth_update(struct radeon_device *rdev) rv515_bandwidth_update() argument
1280 if (!rdev->mode_info.mode_config_initialized) rv515_bandwidth_update()
1283 radeon_update_display_priority(rdev); rv515_bandwidth_update()
1285 if (rdev->mode_info.crtcs[0]->base.enabled) rv515_bandwidth_update()
1286 mode0 = &rdev->mode_info.crtcs[0]->base.mode; rv515_bandwidth_update()
1287 if (rdev->mode_info.crtcs[1]->base.enabled) rv515_bandwidth_update()
1288 mode1 = &rdev->mode_info.crtcs[1]->base.mode; rv515_bandwidth_update()
1294 if ((rdev->disp_priority == 2) && rv515_bandwidth_update()
1295 (rdev->family == CHIP_RV515)) { rv515_bandwidth_update()
1305 rv515_bandwidth_avivo_update(rdev); rv515_bandwidth_update()
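rv515_mc_stop()/rv515_mc_resume() prove that a full frame has elapsed by sampling radeon_get_vblank_counter() and polling, bounded by usec_timeout, until the counter changes. A sketch of that frame-boundary wait (get_vblank_counter() is a hypothetical stand-in):

#include <stdbool.h>
#include <stdio.h>
#include <unistd.h>

static unsigned int vblank = 0;
/* Stand-in for radeon_get_vblank_counter(); ticks once per 4 polls. */
static unsigned int get_vblank_counter(void) { return ++vblank / 4; }

static bool wait_one_frame(unsigned int usec_timeout)
{
	unsigned int start = get_vblank_counter(), i;

	for (i = 0; i < usec_timeout; i++) {
		if (get_vblank_counter() != start)
			return true;	/* counter moved: a frame boundary passed */
		usleep(1);
	}
	return false;
}

int main(void)
{
	printf("frame elapsed: %s\n", wait_one_frame(100) ? "yes" : "no");
	return 0;
}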
H A Drs780_dpm.c41 static struct igp_power_info *rs780_get_pi(struct radeon_device *rdev) rs780_get_pi() argument
43 struct igp_power_info *pi = rdev->pm.dpm.priv; rs780_get_pi()
48 static void rs780_get_pm_mode_parameters(struct radeon_device *rdev) rs780_get_pm_mode_parameters() argument
50 struct igp_power_info *pi = rs780_get_pi(rdev); rs780_get_pm_mode_parameters()
51 struct radeon_mode_info *minfo = &rdev->mode_info; rs780_get_pm_mode_parameters()
60 for (i = 0; i < rdev->num_crtc; i++) { rs780_get_pm_mode_parameters()
72 static void rs780_voltage_scaling_enable(struct radeon_device *rdev, bool enable);
74 static int rs780_initialize_dpm_power_state(struct radeon_device *rdev, rs780_initialize_dpm_power_state() argument
81 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, rs780_initialize_dpm_power_state()
86 r600_engine_clock_entry_set_reference_divider(rdev, 0, dividers.ref_div); rs780_initialize_dpm_power_state()
87 r600_engine_clock_entry_set_feedback_divider(rdev, 0, dividers.fb_div); rs780_initialize_dpm_power_state()
88 r600_engine_clock_entry_set_post_divider(rdev, 0, dividers.post_div); rs780_initialize_dpm_power_state()
91 r600_engine_clock_entry_enable_post_divider(rdev, 0, true); rs780_initialize_dpm_power_state()
93 r600_engine_clock_entry_enable_post_divider(rdev, 0, false); rs780_initialize_dpm_power_state()
95 r600_engine_clock_entry_set_step_time(rdev, 0, R600_SST_DFLT); rs780_initialize_dpm_power_state()
96 r600_engine_clock_entry_enable_pulse_skipping(rdev, 0, false); rs780_initialize_dpm_power_state()
98 r600_engine_clock_entry_enable(rdev, 0, true); rs780_initialize_dpm_power_state()
100 r600_engine_clock_entry_enable(rdev, i, false); rs780_initialize_dpm_power_state()
102 r600_enable_mclk_control(rdev, false); rs780_initialize_dpm_power_state()
103 r600_voltage_control_enable_pins(rdev, 0); rs780_initialize_dpm_power_state()
108 static int rs780_initialize_dpm_parameters(struct radeon_device *rdev, rs780_initialize_dpm_parameters() argument
114 r600_set_bsp(rdev, R600_BSU_DFLT, R600_BSP_DFLT); rs780_initialize_dpm_parameters()
116 r600_set_at(rdev, 0, 0, 0, 0); rs780_initialize_dpm_parameters()
118 r600_set_git(rdev, R600_GICST_DFLT); rs780_initialize_dpm_parameters()
121 r600_set_tc(rdev, i, 0, 0); rs780_initialize_dpm_parameters()
123 r600_select_td(rdev, R600_TD_DFLT); rs780_initialize_dpm_parameters()
124 r600_set_vrc(rdev, 0); rs780_initialize_dpm_parameters()
126 r600_set_tpu(rdev, R600_TPU_DFLT); rs780_initialize_dpm_parameters()
127 r600_set_tpc(rdev, R600_TPC_DFLT); rs780_initialize_dpm_parameters()
129 r600_set_sstu(rdev, R600_SSTU_DFLT); rs780_initialize_dpm_parameters()
130 r600_set_sst(rdev, R600_SST_DFLT); rs780_initialize_dpm_parameters()
132 r600_set_fctu(rdev, R600_FCTU_DFLT); rs780_initialize_dpm_parameters()
133 r600_set_fct(rdev, R600_FCT_DFLT); rs780_initialize_dpm_parameters()
135 r600_set_vddc3d_oorsu(rdev, R600_VDDC3DOORSU_DFLT); rs780_initialize_dpm_parameters()
136 r600_set_vddc3d_oorphc(rdev, R600_VDDC3DOORPHC_DFLT); rs780_initialize_dpm_parameters()
137 r600_set_vddc3d_oorsdc(rdev, R600_VDDC3DOORSDC_DFLT); rs780_initialize_dpm_parameters()
138 r600_set_ctxcgtt3d_rphc(rdev, R600_CTXCGTT3DRPHC_DFLT); rs780_initialize_dpm_parameters()
139 r600_set_ctxcgtt3d_rsdc(rdev, R600_CTXCGTT3DRSDC_DFLT); rs780_initialize_dpm_parameters()
141 r600_vid_rt_set_vru(rdev, R600_VRU_DFLT); rs780_initialize_dpm_parameters()
142 r600_vid_rt_set_vrt(rdev, R600_VOLTAGERESPONSETIME_DFLT); rs780_initialize_dpm_parameters()
143 r600_vid_rt_set_ssu(rdev, R600_SPLLSTEPUNIT_DFLT); rs780_initialize_dpm_parameters()
145 ret = rs780_initialize_dpm_power_state(rdev, boot_ps); rs780_initialize_dpm_parameters()
147 r600_power_level_set_voltage_index(rdev, R600_POWER_LEVEL_LOW, 0); rs780_initialize_dpm_parameters()
148 r600_power_level_set_voltage_index(rdev, R600_POWER_LEVEL_MEDIUM, 0); rs780_initialize_dpm_parameters()
149 r600_power_level_set_voltage_index(rdev, R600_POWER_LEVEL_HIGH, 0); rs780_initialize_dpm_parameters()
151 r600_power_level_set_mem_clock_index(rdev, R600_POWER_LEVEL_LOW, 0); rs780_initialize_dpm_parameters()
152 r600_power_level_set_mem_clock_index(rdev, R600_POWER_LEVEL_MEDIUM, 0); rs780_initialize_dpm_parameters()
153 r600_power_level_set_mem_clock_index(rdev, R600_POWER_LEVEL_HIGH, 0); rs780_initialize_dpm_parameters()
155 r600_power_level_set_eng_clock_index(rdev, R600_POWER_LEVEL_LOW, 0); rs780_initialize_dpm_parameters()
156 r600_power_level_set_eng_clock_index(rdev, R600_POWER_LEVEL_MEDIUM, 0); rs780_initialize_dpm_parameters()
157 r600_power_level_set_eng_clock_index(rdev, R600_POWER_LEVEL_HIGH, 0); rs780_initialize_dpm_parameters()
159 r600_power_level_set_watermark_id(rdev, R600_POWER_LEVEL_LOW, R600_DISPLAY_WATERMARK_HIGH); rs780_initialize_dpm_parameters()
160 r600_power_level_set_watermark_id(rdev, R600_POWER_LEVEL_MEDIUM, R600_DISPLAY_WATERMARK_HIGH); rs780_initialize_dpm_parameters()
161 r600_power_level_set_watermark_id(rdev, R600_POWER_LEVEL_HIGH, R600_DISPLAY_WATERMARK_HIGH); rs780_initialize_dpm_parameters()
163 r600_power_level_enable(rdev, R600_POWER_LEVEL_CTXSW, false); rs780_initialize_dpm_parameters()
164 r600_power_level_enable(rdev, R600_POWER_LEVEL_HIGH, false); rs780_initialize_dpm_parameters()
165 r600_power_level_enable(rdev, R600_POWER_LEVEL_MEDIUM, false); rs780_initialize_dpm_parameters()
166 r600_power_level_enable(rdev, R600_POWER_LEVEL_LOW, true); rs780_initialize_dpm_parameters()
168 r600_power_level_set_enter_index(rdev, R600_POWER_LEVEL_LOW); rs780_initialize_dpm_parameters()
170 r600_set_vrc(rdev, RS780_CGFTV_DFLT); rs780_initialize_dpm_parameters()
175 static void rs780_start_dpm(struct radeon_device *rdev) rs780_start_dpm() argument
177 r600_enable_sclk_control(rdev, false); rs780_start_dpm()
178 r600_enable_mclk_control(rdev, false); rs780_start_dpm()
180 r600_dynamicpm_enable(rdev, true); rs780_start_dpm()
182 radeon_wait_for_vblank(rdev, 0); rs780_start_dpm()
183 radeon_wait_for_vblank(rdev, 1); rs780_start_dpm()
185 r600_enable_spll_bypass(rdev, true); rs780_start_dpm()
186 r600_wait_for_spll_change(rdev); rs780_start_dpm()
187 r600_enable_spll_bypass(rdev, false); rs780_start_dpm()
188 r600_wait_for_spll_change(rdev); rs780_start_dpm()
190 r600_enable_spll_bypass(rdev, true); rs780_start_dpm()
191 r600_wait_for_spll_change(rdev); rs780_start_dpm()
192 r600_enable_spll_bypass(rdev, false); rs780_start_dpm()
193 r600_wait_for_spll_change(rdev); rs780_start_dpm()
195 r600_enable_sclk_control(rdev, true); rs780_start_dpm()
199 static void rs780_preset_ranges_slow_clk_fbdiv_en(struct radeon_device *rdev) rs780_preset_ranges_slow_clk_fbdiv_en() argument
209 static void rs780_preset_starting_fbdiv(struct radeon_device *rdev) rs780_preset_starting_fbdiv() argument
222 static void rs780_voltage_scaling_init(struct radeon_device *rdev) rs780_voltage_scaling_init() argument
224 struct igp_power_info *pi = rs780_get_pi(rdev); rs780_voltage_scaling_init()
225 struct drm_device *dev = rdev->ddev; rs780_voltage_scaling_init()
276 rs780_voltage_scaling_enable(rdev, true); rs780_voltage_scaling_init()
303 static void rs780_clk_scaling_enable(struct radeon_device *rdev, bool enable) rs780_clk_scaling_enable() argument
313 static void rs780_voltage_scaling_enable(struct radeon_device *rdev, bool enable) rs780_voltage_scaling_enable() argument
321 static void rs780_set_engine_clock_wfc(struct radeon_device *rdev) rs780_set_engine_clock_wfc() argument
336 static void rs780_set_engine_clock_sc(struct radeon_device *rdev) rs780_set_engine_clock_sc() argument
347 static void rs780_set_engine_clock_tdc(struct radeon_device *rdev) rs780_set_engine_clock_tdc() argument
352 static void rs780_set_engine_clock_ssc(struct radeon_device *rdev) rs780_set_engine_clock_ssc() argument
362 static void rs780_program_at(struct radeon_device *rdev) rs780_program_at() argument
364 struct igp_power_info *pi = rs780_get_pi(rdev); rs780_program_at()
373 static void rs780_disable_vbios_powersaving(struct radeon_device *rdev) rs780_disable_vbios_powersaving() argument
378 static void rs780_force_voltage(struct radeon_device *rdev, u16 voltage) rs780_force_voltage() argument
380 struct igp_ps *current_state = rs780_get_ps(rdev->pm.dpm.current_ps); rs780_force_voltage()
405 static void rs780_force_fbdiv(struct radeon_device *rdev, u32 fb_div) rs780_force_fbdiv() argument
407 struct igp_ps *current_state = rs780_get_ps(rdev->pm.dpm.current_ps); rs780_force_fbdiv()
425 static int rs780_set_engine_clock_scaling(struct radeon_device *rdev, rs780_set_engine_clock_scaling() argument
438 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, rs780_set_engine_clock_scaling()
443 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, rs780_set_engine_clock_scaling()
448 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, rs780_set_engine_clock_scaling()
459 rs780_force_fbdiv(rdev, max_dividers.fb_div); rs780_set_engine_clock_scaling()
473 static void rs780_set_engine_clock_spc(struct radeon_device *rdev, rs780_set_engine_clock_spc() argument
479 struct igp_power_info *pi = rs780_get_pi(rdev); rs780_set_engine_clock_spc()
492 static void rs780_activate_engine_clk_scaling(struct radeon_device *rdev, rs780_activate_engine_clk_scaling() argument
506 rs780_clk_scaling_enable(rdev, true); rs780_activate_engine_clk_scaling()
509 static u32 rs780_get_voltage_for_vddc_level(struct radeon_device *rdev, rs780_get_voltage_for_vddc_level() argument
512 struct igp_power_info *pi = rs780_get_pi(rdev); rs780_get_voltage_for_vddc_level()
522 static void rs780_enable_voltage_scaling(struct radeon_device *rdev, rs780_enable_voltage_scaling() argument
526 struct igp_power_info *pi = rs780_get_pi(rdev); rs780_enable_voltage_scaling()
535 vddc_high = rs780_get_voltage_for_vddc_level(rdev, rs780_enable_voltage_scaling()
537 vddc_low = rs780_get_voltage_for_vddc_level(rdev, rs780_enable_voltage_scaling()
563 static void rs780_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev, rs780_set_uvd_clock_before_set_eng_clock() argument
577 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk); rs780_set_uvd_clock_before_set_eng_clock()
580 static void rs780_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev, rs780_set_uvd_clock_after_set_eng_clock() argument
594 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk); rs780_set_uvd_clock_after_set_eng_clock()
597 int rs780_dpm_enable(struct radeon_device *rdev) rs780_dpm_enable() argument
599 struct igp_power_info *pi = rs780_get_pi(rdev); rs780_dpm_enable()
600 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps; rs780_dpm_enable()
603 rs780_get_pm_mode_parameters(rdev); rs780_dpm_enable()
604 rs780_disable_vbios_powersaving(rdev); rs780_dpm_enable()
606 if (r600_dynamicpm_enabled(rdev)) rs780_dpm_enable()
608 ret = rs780_initialize_dpm_parameters(rdev, boot_ps); rs780_dpm_enable()
611 rs780_start_dpm(rdev); rs780_dpm_enable()
613 rs780_preset_ranges_slow_clk_fbdiv_en(rdev); rs780_dpm_enable()
614 rs780_preset_starting_fbdiv(rdev); rs780_dpm_enable()
616 rs780_voltage_scaling_init(rdev); rs780_dpm_enable()
617 rs780_clk_scaling_enable(rdev, true); rs780_dpm_enable()
618 rs780_set_engine_clock_sc(rdev); rs780_dpm_enable()
619 rs780_set_engine_clock_wfc(rdev); rs780_dpm_enable()
620 rs780_program_at(rdev); rs780_dpm_enable()
621 rs780_set_engine_clock_tdc(rdev); rs780_dpm_enable()
622 rs780_set_engine_clock_ssc(rdev); rs780_dpm_enable()
625 r600_gfx_clockgating_enable(rdev, true); rs780_dpm_enable()
630 void rs780_dpm_disable(struct radeon_device *rdev) rs780_dpm_disable() argument
632 struct igp_power_info *pi = rs780_get_pi(rdev); rs780_dpm_disable()
634 r600_dynamicpm_enable(rdev, false); rs780_dpm_disable()
636 rs780_clk_scaling_enable(rdev, false); rs780_dpm_disable()
637 rs780_voltage_scaling_enable(rdev, false); rs780_dpm_disable()
640 r600_gfx_clockgating_enable(rdev, false); rs780_dpm_disable()
642 if (rdev->irq.installed && rs780_dpm_disable()
643 (rdev->pm.int_thermal_type == THERMAL_TYPE_RV6XX)) { rs780_dpm_disable()
644 rdev->irq.dpm_thermal = false; rs780_dpm_disable()
645 radeon_irq_set(rdev); rs780_dpm_disable()
649 int rs780_dpm_set_power_state(struct radeon_device *rdev) rs780_dpm_set_power_state() argument
651 struct igp_power_info *pi = rs780_get_pi(rdev); rs780_dpm_set_power_state()
652 struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps; rs780_dpm_set_power_state()
653 struct radeon_ps *old_ps = rdev->pm.dpm.current_ps; rs780_dpm_set_power_state()
656 rs780_get_pm_mode_parameters(rdev); rs780_dpm_set_power_state()
658 rs780_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps); rs780_dpm_set_power_state()
661 rs780_force_voltage(rdev, pi->max_voltage); rs780_dpm_set_power_state()
665 ret = rs780_set_engine_clock_scaling(rdev, new_ps, old_ps); rs780_dpm_set_power_state()
668 rs780_set_engine_clock_spc(rdev, new_ps, old_ps); rs780_dpm_set_power_state()
670 rs780_activate_engine_clk_scaling(rdev, new_ps, old_ps); rs780_dpm_set_power_state()
673 rs780_enable_voltage_scaling(rdev, new_ps); rs780_dpm_set_power_state()
675 rs780_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps); rs780_dpm_set_power_state()
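rs780_dpm_set_power_state() brackets the engine-clock switch with the two UVD helpers; in this family's pattern the UVD clocks are raised before an upclock and lowered only after a downclock, so UVD is never starved mid-transition. A sketch of that ordering rule (the helpers are stand-ins, and the >/< comparison is an assumption about the truncated rs780_set_uvd_clock_* bodies):

#include <stdio.h>

static void set_uvd_clocks(unsigned int vclk) { printf("uvd vclk=%u\n", vclk); }
static void set_eng_clock(unsigned int sclk)  { printf("eng sclk=%u\n", sclk); }

static void set_power_state(unsigned int old_vclk, unsigned int new_vclk,
			    unsigned int new_sclk)
{
	if (new_vclk > old_vclk)
		set_uvd_clocks(new_vclk);	/* raise before the switch */
	set_eng_clock(new_sclk);
	if (new_vclk < old_vclk)
		set_uvd_clocks(new_vclk);	/* lower after the switch */
}

int main(void)
{
	set_power_state(30000, 54000, 60000);	/* upclock: UVD bumped first */
	set_power_state(54000, 30000, 20000);	/* downclock: UVD dropped last */
	return 0;
}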
680 void rs780_dpm_setup_asic(struct radeon_device *rdev) rs780_dpm_setup_asic() argument
685 void rs780_dpm_display_configuration_changed(struct radeon_device *rdev) rs780_dpm_display_configuration_changed() argument
687 rs780_get_pm_mode_parameters(rdev); rs780_dpm_display_configuration_changed()
688 rs780_program_at(rdev); rs780_dpm_display_configuration_changed()
717 static void rs780_parse_pplib_non_clock_info(struct radeon_device *rdev, rs780_parse_pplib_non_clock_info() argument
742 rdev->pm.dpm.boot_ps = rps; rs780_parse_pplib_non_clock_info()
744 rdev->pm.dpm.uvd_ps = rps; rs780_parse_pplib_non_clock_info()
747 static void rs780_parse_pplib_clock_info(struct radeon_device *rdev, rs780_parse_pplib_clock_info() argument
782 ps->sclk_low = rdev->clock.default_sclk; rs780_parse_pplib_clock_info()
783 ps->sclk_high = rdev->clock.default_sclk; rs780_parse_pplib_clock_info()
789 static int rs780_parse_power_table(struct radeon_device *rdev) rs780_parse_power_table() argument
791 struct radeon_mode_info *mode_info = &rdev->mode_info; rs780_parse_power_table()
807 rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) * rs780_parse_power_table()
809 if (!rdev->pm.dpm.ps) rs780_parse_power_table()
830 kfree(rdev->pm.dpm.ps); rs780_parse_power_table()
833 rdev->pm.dpm.ps[i].ps_priv = ps; rs780_parse_power_table()
834 rs780_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], rs780_parse_power_table()
837 rs780_parse_pplib_clock_info(rdev, rs780_parse_power_table()
838 &rdev->pm.dpm.ps[i], rs780_parse_power_table()
842 rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates; rs780_parse_power_table()
846 int rs780_dpm_init(struct radeon_device *rdev) rs780_dpm_init() argument
858 rdev->pm.dpm.priv = pi; rs780_dpm_init()
860 ret = r600_get_platform_caps(rdev); rs780_dpm_init()
864 ret = rs780_parse_power_table(rdev); rs780_dpm_init()
871 if (atom_parse_data_header(rdev->mode_info.atom_context, index, NULL, rs780_dpm_init()
873 info = (union igp_info *)(rdev->mode_info.atom_context->bios + data_offset); rs780_dpm_init()
933 radeon_dpm_fini(rdev); rs780_dpm_init()
937 void rs780_dpm_print_power_state(struct radeon_device *rdev, rs780_dpm_print_power_state() argument
949 r600_dpm_print_ps_status(rdev, rps); rs780_dpm_print_power_state()
952 void rs780_dpm_fini(struct radeon_device *rdev) rs780_dpm_fini() argument
956 for (i = 0; i < rdev->pm.dpm.num_ps; i++) { rs780_dpm_fini()
957 kfree(rdev->pm.dpm.ps[i].ps_priv); rs780_dpm_fini()
959 kfree(rdev->pm.dpm.ps); rs780_dpm_fini()
960 kfree(rdev->pm.dpm.priv); rs780_dpm_fini()
963 u32 rs780_dpm_get_sclk(struct radeon_device *rdev, bool low) rs780_dpm_get_sclk() argument
965 struct igp_ps *requested_state = rs780_get_ps(rdev->pm.dpm.requested_ps); rs780_dpm_get_sclk()
973 u32 rs780_dpm_get_mclk(struct radeon_device *rdev, bool low) rs780_dpm_get_mclk() argument
975 struct igp_power_info *pi = rs780_get_pi(rdev); rs780_dpm_get_mclk()
980 void rs780_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev, rs780_dpm_debugfs_print_current_performance_level() argument
983 struct radeon_ps *rps = rdev->pm.dpm.current_ps; rs780_dpm_debugfs_print_current_performance_level()
990 u32 sclk = (rdev->clock.spll.reference_freq * current_fb_div) / rs780_dpm_debugfs_print_current_performance_level()
1005 u32 rs780_dpm_get_current_sclk(struct radeon_device *rdev) rs780_dpm_get_current_sclk() argument
1012 u32 sclk = (rdev->clock.spll.reference_freq * current_fb_div) / rs780_dpm_get_current_sclk()
1019 u32 rs780_dpm_get_current_mclk(struct radeon_device *rdev) rs780_dpm_get_current_mclk() argument
1021 struct igp_power_info *pi = rs780_get_pi(rdev); rs780_dpm_get_current_mclk()
1026 int rs780_dpm_force_performance_level(struct radeon_device *rdev, rs780_dpm_force_performance_level() argument
1029 struct igp_power_info *pi = rs780_get_pi(rdev); rs780_dpm_force_performance_level()
1030 struct radeon_ps *rps = rdev->pm.dpm.current_ps; rs780_dpm_force_performance_level()
1035 rs780_clk_scaling_enable(rdev, false); rs780_dpm_force_performance_level()
1036 rs780_voltage_scaling_enable(rdev, false); rs780_dpm_force_performance_level()
1040 rs780_force_voltage(rdev, pi->max_voltage); rs780_dpm_force_performance_level()
1042 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, rs780_dpm_force_performance_level()
1047 rs780_force_fbdiv(rdev, dividers.fb_div); rs780_dpm_force_performance_level()
1049 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, rs780_dpm_force_performance_level()
1054 rs780_force_fbdiv(rdev, dividers.fb_div); rs780_dpm_force_performance_level()
1057 rs780_force_voltage(rdev, pi->min_voltage); rs780_dpm_force_performance_level()
1060 rs780_force_voltage(rdev, pi->max_voltage); rs780_dpm_force_performance_level()
1064 rs780_clk_scaling_enable(rdev, true); rs780_dpm_force_performance_level()
1068 rs780_voltage_scaling_enable(rdev, true); rs780_dpm_force_performance_level()
1069 rs780_enable_voltage_scaling(rdev, rps); rs780_dpm_force_performance_level()
1073 rdev->pm.dpm.forced_level = level; rs780_dpm_force_performance_level()
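[Editor's note] The rs780_dpm_force_performance_level() hits above share a shape common to the radeon DPM backends: for a forced level, disable automatic clock/voltage scaling, pin voltage and feedback divider to the requested extreme, and only for the AUTO level re-enable scaling. A minimal standalone C sketch of that control flow; the types and helpers (perf_level, scaling_enable(), force_voltage()) are invented for this sketch, not the driver's API:

    #include <stdbool.h>
    #include <stdio.h>

    enum perf_level { LEVEL_AUTO, LEVEL_LOW, LEVEL_HIGH };

    struct dummy_pi { int min_voltage, max_voltage; }; /* stand-in for igp_power_info */

    static void scaling_enable(bool on) { printf("scaling %s\n", on ? "on" : "off"); }
    static void force_voltage(int mv)   { printf("force voltage %d\n", mv); }

    /* Mirrors the branch structure of rs780_dpm_force_performance_level(). */
    static int force_performance_level(struct dummy_pi *pi, enum perf_level level)
    {
        if (level == LEVEL_AUTO) {
            scaling_enable(true);        /* hand control back to hardware scaling */
            return 0;
        }
        scaling_enable(false);           /* pin clocks before forcing a point */
        force_voltage(level == LEVEL_HIGH ? pi->max_voltage : pi->min_voltage);
        return 0;
    }

    int main(void)
    {
        struct dummy_pi pi = { .min_voltage = 900, .max_voltage = 1100 };
        force_performance_level(&pi, LEVEL_HIGH);
        force_performance_level(&pi, LEVEL_AUTO);
        return 0;
    }

In the driver the forced divider (rs780_force_fbdiv()) is pinned alongside the voltage; the sketch keeps only the voltage to show the enable/disable symmetry.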
H A Dradeon_kms.c58 struct radeon_device *rdev = dev->dev_private; radeon_driver_unload_kms() local
60 if (rdev == NULL) radeon_driver_unload_kms()
63 if (rdev->rmmio == NULL) radeon_driver_unload_kms()
68 radeon_kfd_device_fini(rdev); radeon_driver_unload_kms()
70 radeon_acpi_fini(rdev); radeon_driver_unload_kms()
72 radeon_modeset_fini(rdev); radeon_driver_unload_kms()
73 radeon_device_fini(rdev); radeon_driver_unload_kms()
76 kfree(rdev); radeon_driver_unload_kms()
96 struct radeon_device *rdev; radeon_driver_load_kms() local
99 rdev = kzalloc(sizeof(struct radeon_device), GFP_KERNEL); radeon_driver_load_kms()
100 if (rdev == NULL) { radeon_driver_load_kms()
103 dev->dev_private = (void *)rdev; radeon_driver_load_kms()
125 r = radeon_device_init(rdev, dev, dev->pdev, flags); radeon_driver_load_kms()
135 r = radeon_modeset_init(rdev); radeon_driver_load_kms()
143 acpi_status = radeon_acpi_init(rdev); radeon_driver_load_kms()
149 radeon_kfd_device_probe(rdev); radeon_driver_load_kms()
150 radeon_kfd_device_init(rdev); radeon_driver_load_kms()
184 struct radeon_device *rdev = dev->dev_private; radeon_set_filp_rights() local
186 mutex_lock(&rdev->gem.mutex); radeon_set_filp_rights()
197 mutex_unlock(&rdev->gem.mutex); radeon_set_filp_rights()
206 * @rdev: radeon device pointer
217 struct radeon_device *rdev = dev->dev_private; radeon_info_ioctl() local
219 struct radeon_mode_info *minfo = &rdev->mode_info; radeon_info_ioctl()
234 *value = rdev->num_gb_pipes; radeon_info_ioctl()
237 *value = rdev->num_z_pipes; radeon_info_ioctl()
241 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) radeon_info_ioctl()
244 *value = rdev->accel_working; radeon_info_ioctl()
251 for (i = 0, found = 0; i < rdev->num_crtc; i++) { radeon_info_ioctl()
266 if (rdev->family == CHIP_HAWAII) { radeon_info_ioctl()
267 if (rdev->accel_working) { radeon_info_ioctl()
268 if (rdev->new_fw) radeon_info_ioctl()
276 *value = rdev->accel_working; radeon_info_ioctl()
280 if (rdev->family >= CHIP_BONAIRE) radeon_info_ioctl()
281 *value = rdev->config.cik.tile_config; radeon_info_ioctl()
282 else if (rdev->family >= CHIP_TAHITI) radeon_info_ioctl()
283 *value = rdev->config.si.tile_config; radeon_info_ioctl()
284 else if (rdev->family >= CHIP_CAYMAN) radeon_info_ioctl()
285 *value = rdev->config.cayman.tile_config; radeon_info_ioctl()
286 else if (rdev->family >= CHIP_CEDAR) radeon_info_ioctl()
287 *value = rdev->config.evergreen.tile_config; radeon_info_ioctl()
288 else if (rdev->family >= CHIP_RV770) radeon_info_ioctl()
289 *value = rdev->config.rv770.tile_config; radeon_info_ioctl()
290 else if (rdev->family >= CHIP_R600) radeon_info_ioctl()
291 *value = rdev->config.r600.tile_config; radeon_info_ioctl()
312 radeon_set_filp_rights(dev, &rdev->hyperz_filp, filp, value); radeon_info_ioctl()
324 radeon_set_filp_rights(dev, &rdev->cmask_filp, filp, value); radeon_info_ioctl()
328 if (rdev->asic->get_xclk) radeon_info_ioctl()
329 *value = radeon_get_xclk(rdev) * 10; radeon_info_ioctl()
331 *value = rdev->clock.spll.reference_freq * 10; radeon_info_ioctl()
334 if (rdev->family >= CHIP_BONAIRE) radeon_info_ioctl()
335 *value = rdev->config.cik.max_backends_per_se * radeon_info_ioctl()
336 rdev->config.cik.max_shader_engines; radeon_info_ioctl()
337 else if (rdev->family >= CHIP_TAHITI) radeon_info_ioctl()
338 *value = rdev->config.si.max_backends_per_se * radeon_info_ioctl()
339 rdev->config.si.max_shader_engines; radeon_info_ioctl()
340 else if (rdev->family >= CHIP_CAYMAN) radeon_info_ioctl()
341 *value = rdev->config.cayman.max_backends_per_se * radeon_info_ioctl()
342 rdev->config.cayman.max_shader_engines; radeon_info_ioctl()
343 else if (rdev->family >= CHIP_CEDAR) radeon_info_ioctl()
344 *value = rdev->config.evergreen.max_backends; radeon_info_ioctl()
345 else if (rdev->family >= CHIP_RV770) radeon_info_ioctl()
346 *value = rdev->config.rv770.max_backends; radeon_info_ioctl()
347 else if (rdev->family >= CHIP_R600) radeon_info_ioctl()
348 *value = rdev->config.r600.max_backends; radeon_info_ioctl()
354 if (rdev->family >= CHIP_BONAIRE) radeon_info_ioctl()
355 *value = rdev->config.cik.max_tile_pipes; radeon_info_ioctl()
356 else if (rdev->family >= CHIP_TAHITI) radeon_info_ioctl()
357 *value = rdev->config.si.max_tile_pipes; radeon_info_ioctl()
358 else if (rdev->family >= CHIP_CAYMAN) radeon_info_ioctl()
359 *value = rdev->config.cayman.max_tile_pipes; radeon_info_ioctl()
360 else if (rdev->family >= CHIP_CEDAR) radeon_info_ioctl()
361 *value = rdev->config.evergreen.max_tile_pipes; radeon_info_ioctl()
362 else if (rdev->family >= CHIP_RV770) radeon_info_ioctl()
363 *value = rdev->config.rv770.max_tile_pipes; radeon_info_ioctl()
364 else if (rdev->family >= CHIP_R600) radeon_info_ioctl()
365 *value = rdev->config.r600.max_tile_pipes; radeon_info_ioctl()
374 if (rdev->family >= CHIP_BONAIRE) radeon_info_ioctl()
375 *value = rdev->config.cik.backend_map; radeon_info_ioctl()
376 else if (rdev->family >= CHIP_TAHITI) radeon_info_ioctl()
377 *value = rdev->config.si.backend_map; radeon_info_ioctl()
378 else if (rdev->family >= CHIP_CAYMAN) radeon_info_ioctl()
379 *value = rdev->config.cayman.backend_map; radeon_info_ioctl()
380 else if (rdev->family >= CHIP_CEDAR) radeon_info_ioctl()
381 *value = rdev->config.evergreen.backend_map; radeon_info_ioctl()
382 else if (rdev->family >= CHIP_RV770) radeon_info_ioctl()
383 *value = rdev->config.rv770.backend_map; radeon_info_ioctl()
384 else if (rdev->family >= CHIP_R600) radeon_info_ioctl()
385 *value = rdev->config.r600.backend_map; radeon_info_ioctl()
392 if (rdev->family < CHIP_CAYMAN) radeon_info_ioctl()
398 if (rdev->family < CHIP_CAYMAN) radeon_info_ioctl()
403 if (rdev->family >= CHIP_BONAIRE) radeon_info_ioctl()
404 *value = rdev->config.cik.max_cu_per_sh; radeon_info_ioctl()
405 else if (rdev->family >= CHIP_TAHITI) radeon_info_ioctl()
406 *value = rdev->config.si.max_cu_per_sh; radeon_info_ioctl()
407 else if (rdev->family >= CHIP_CAYMAN) radeon_info_ioctl()
408 *value = rdev->config.cayman.max_pipes_per_simd; radeon_info_ioctl()
409 else if (rdev->family >= CHIP_CEDAR) radeon_info_ioctl()
410 *value = rdev->config.evergreen.max_pipes; radeon_info_ioctl()
411 else if (rdev->family >= CHIP_RV770) radeon_info_ioctl()
412 *value = rdev->config.rv770.max_pipes; radeon_info_ioctl()
413 else if (rdev->family >= CHIP_R600) radeon_info_ioctl()
414 *value = rdev->config.r600.max_pipes; radeon_info_ioctl()
420 if (rdev->family < CHIP_R600) { radeon_info_ioctl()
426 value64 = radeon_get_gpu_clock_counter(rdev); radeon_info_ioctl()
429 if (rdev->family >= CHIP_BONAIRE) radeon_info_ioctl()
430 *value = rdev->config.cik.max_shader_engines; radeon_info_ioctl()
431 else if (rdev->family >= CHIP_TAHITI) radeon_info_ioctl()
432 *value = rdev->config.si.max_shader_engines; radeon_info_ioctl()
433 else if (rdev->family >= CHIP_CAYMAN) radeon_info_ioctl()
434 *value = rdev->config.cayman.max_shader_engines; radeon_info_ioctl()
435 else if (rdev->family >= CHIP_CEDAR) radeon_info_ioctl()
436 *value = rdev->config.evergreen.num_ses; radeon_info_ioctl()
441 if (rdev->family >= CHIP_BONAIRE) radeon_info_ioctl()
442 *value = rdev->config.cik.max_sh_per_se; radeon_info_ioctl()
443 else if (rdev->family >= CHIP_TAHITI) radeon_info_ioctl()
444 *value = rdev->config.si.max_sh_per_se; radeon_info_ioctl()
449 *value = rdev->fastfb_working; radeon_info_ioctl()
459 *value = rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready; radeon_info_ioctl()
462 *value = rdev->ring[R600_RING_TYPE_DMA_INDEX].ready; radeon_info_ioctl()
463 *value |= rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX].ready; radeon_info_ioctl()
466 *value = rdev->ring[R600_RING_TYPE_UVD_INDEX].ready; radeon_info_ioctl()
469 *value = rdev->ring[TN_RING_TYPE_VCE1_INDEX].ready; radeon_info_ioctl()
476 if (rdev->family >= CHIP_BONAIRE) { radeon_info_ioctl()
477 value = rdev->config.cik.tile_mode_array; radeon_info_ioctl()
479 } else if (rdev->family >= CHIP_TAHITI) { radeon_info_ioctl()
480 value = rdev->config.si.tile_mode_array; radeon_info_ioctl()
488 if (rdev->family >= CHIP_BONAIRE) { radeon_info_ioctl()
489 value = rdev->config.cik.macrotile_mode_array; radeon_info_ioctl()
500 if (rdev->family >= CHIP_BONAIRE) { radeon_info_ioctl()
501 *value = rdev->config.cik.backend_enable_mask; radeon_info_ioctl()
502 } else if (rdev->family >= CHIP_TAHITI) { radeon_info_ioctl()
503 *value = rdev->config.si.backend_enable_mask; radeon_info_ioctl()
509 if ((rdev->pm.pm_method == PM_METHOD_DPM) && radeon_info_ioctl()
510 rdev->pm.dpm_enabled) radeon_info_ioctl()
511 *value = rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk * 10; radeon_info_ioctl()
513 *value = rdev->pm.default_sclk * 10; radeon_info_ioctl()
516 *value = rdev->vce.fw_version; radeon_info_ioctl()
519 *value = rdev->vce.fb_version; radeon_info_ioctl()
524 value64 = atomic64_read(&rdev->num_bytes_moved); radeon_info_ioctl()
529 value64 = atomic64_read(&rdev->vram_usage); radeon_info_ioctl()
534 value64 = atomic64_read(&rdev->gtt_usage); radeon_info_ioctl()
537 if (rdev->family >= CHIP_BONAIRE) radeon_info_ioctl()
538 *value = rdev->config.cik.active_cus; radeon_info_ioctl()
539 else if (rdev->family >= CHIP_TAHITI) radeon_info_ioctl()
540 *value = rdev->config.si.active_cus; radeon_info_ioctl()
541 else if (rdev->family >= CHIP_CAYMAN) radeon_info_ioctl()
542 *value = rdev->config.cayman.active_simds; radeon_info_ioctl()
543 else if (rdev->family >= CHIP_CEDAR) radeon_info_ioctl()
544 *value = rdev->config.evergreen.active_simds; radeon_info_ioctl()
545 else if (rdev->family >= CHIP_RV770) radeon_info_ioctl()
546 *value = rdev->config.rv770.active_simds; radeon_info_ioctl()
547 else if (rdev->family >= CHIP_R600) radeon_info_ioctl()
548 *value = rdev->config.r600.active_simds; radeon_info_ioctl()
554 if (rdev->asic->pm.get_temperature) radeon_info_ioctl()
555 *value = radeon_get_temperature(rdev); radeon_info_ioctl()
561 if (rdev->pm.dpm_enabled) radeon_info_ioctl()
562 *value = radeon_dpm_get_current_sclk(rdev) / 100; radeon_info_ioctl()
564 *value = rdev->pm.current_sclk / 100; radeon_info_ioctl()
568 if (rdev->pm.dpm_enabled) radeon_info_ioctl()
569 *value = radeon_dpm_get_current_mclk(rdev) / 100; radeon_info_ioctl()
571 *value = rdev->pm.current_mclk / 100; radeon_info_ioctl()
578 if (radeon_get_allowed_info_register(rdev, *value, value)) radeon_info_ioctl()
585 *value = atomic_read(&rdev->gpu_reset_counter); radeon_info_ioctl()
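[Editor's note] Nearly every query in radeon_info_ioctl() resolves a per-generation value with the same descending if/else ladder over rdev->family. Because the CHIP_* constants are ordered oldest to newest, the ladder must test the newest family first; reversed, every ASIC would match the oldest branch. A standalone sketch of the idiom, with invented enum values and numbers:

    #include <stdio.h>

    /* Invented stand-ins, ordered oldest -> newest like the CHIP_* enum. */
    enum family { FAM_R600 = 1, FAM_CEDAR, FAM_CAYMAN, FAM_TAHITI, FAM_BONAIRE };

    static unsigned max_backends(enum family f)
    {
        /* Newest first: each test subsumes every older family below it.
         * In the driver each branch reads a different rdev->config.* struct. */
        if (f >= FAM_BONAIRE)     return 8;
        else if (f >= FAM_TAHITI) return 8;
        else if (f >= FAM_CAYMAN) return 4;
        else if (f >= FAM_CEDAR)  return 2;
        else                      return 1;   /* R600-class fallback */
    }

    int main(void)
    {
        printf("tahiti backends: %u\n", max_backends(FAM_TAHITI));
        return 0;
    }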
611 struct radeon_device *rdev = dev->dev_private; radeon_driver_lastclose_kms() local
613 radeon_fbdev_restore_mode(rdev); radeon_driver_lastclose_kms()
628 struct radeon_device *rdev = dev->dev_private; radeon_driver_open_kms() local
638 if (rdev->family >= CHIP_CAYMAN) { radeon_driver_open_kms()
648 if (rdev->accel_working) { radeon_driver_open_kms()
650 r = radeon_vm_init(rdev, vm); radeon_driver_open_kms()
656 r = radeon_bo_reserve(rdev->ring_tmp_bo.bo, false); radeon_driver_open_kms()
658 radeon_vm_fini(rdev, vm); radeon_driver_open_kms()
665 vm->ib_bo_va = radeon_vm_bo_add(rdev, vm, radeon_driver_open_kms()
666 rdev->ring_tmp_bo.bo); radeon_driver_open_kms()
667 r = radeon_vm_bo_set_addr(rdev, vm->ib_bo_va, radeon_driver_open_kms()
672 radeon_vm_fini(rdev, vm); radeon_driver_open_kms()
696 struct radeon_device *rdev = dev->dev_private; radeon_driver_postclose_kms() local
699 if (rdev->family >= CHIP_CAYMAN && file_priv->driver_priv) { radeon_driver_postclose_kms()
704 if (rdev->accel_working) { radeon_driver_postclose_kms()
705 r = radeon_bo_reserve(rdev->ring_tmp_bo.bo, false); radeon_driver_postclose_kms()
708 radeon_vm_bo_rmv(rdev, vm->ib_bo_va); radeon_driver_postclose_kms()
709 radeon_bo_unreserve(rdev->ring_tmp_bo.bo); radeon_driver_postclose_kms()
711 radeon_vm_fini(rdev, vm); radeon_driver_postclose_kms()
731 struct radeon_device *rdev = dev->dev_private; radeon_driver_preclose_kms() local
733 mutex_lock(&rdev->gem.mutex); radeon_driver_preclose_kms()
734 if (rdev->hyperz_filp == file_priv) radeon_driver_preclose_kms()
735 rdev->hyperz_filp = NULL; radeon_driver_preclose_kms()
736 if (rdev->cmask_filp == file_priv) radeon_driver_preclose_kms()
737 rdev->cmask_filp = NULL; radeon_driver_preclose_kms()
738 mutex_unlock(&rdev->gem.mutex); radeon_driver_preclose_kms()
740 radeon_uvd_free_handles(rdev, file_priv); radeon_driver_preclose_kms()
741 radeon_vce_free_handles(rdev, file_priv); radeon_driver_preclose_kms()
760 struct radeon_device *rdev = dev->dev_private; radeon_get_vblank_counter_kms() local
762 if (crtc < 0 || crtc >= rdev->num_crtc) { radeon_get_vblank_counter_kms()
775 if (rdev->mode_info.crtcs[crtc]) { radeon_get_vblank_counter_kms()
780 count = radeon_get_vblank_counter(rdev, crtc); radeon_get_vblank_counter_kms()
788 &rdev->mode_info.crtcs[crtc]->base.hwmode); radeon_get_vblank_counter_kms()
789 } while (count != radeon_get_vblank_counter(rdev, crtc)); radeon_get_vblank_counter_kms()
809 count = radeon_get_vblank_counter(rdev, crtc); radeon_get_vblank_counter_kms()
827 struct radeon_device *rdev = dev->dev_private; radeon_enable_vblank_kms() local
831 if (crtc < 0 || crtc >= rdev->num_crtc) { radeon_enable_vblank_kms()
836 spin_lock_irqsave(&rdev->irq.lock, irqflags); radeon_enable_vblank_kms()
837 rdev->irq.crtc_vblank_int[crtc] = true; radeon_enable_vblank_kms()
838 r = radeon_irq_set(rdev); radeon_enable_vblank_kms()
839 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); radeon_enable_vblank_kms()
853 struct radeon_device *rdev = dev->dev_private; radeon_disable_vblank_kms() local
856 if (crtc < 0 || crtc >= rdev->num_crtc) { radeon_disable_vblank_kms()
861 spin_lock_irqsave(&rdev->irq.lock, irqflags); radeon_disable_vblank_kms()
862 rdev->irq.crtc_vblank_int[crtc] = false; radeon_disable_vblank_kms()
863 radeon_irq_set(rdev); radeon_disable_vblank_kms()
864 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); radeon_disable_vblank_kms()
886 struct radeon_device *rdev = dev->dev_private; radeon_get_vblank_timestamp_kms() local
894 drmcrtc = &rdev->mode_info.crtcs[crtc]->base; radeon_get_vblank_timestamp_kms()
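[Editor's note] radeon_enable_vblank_kms()/radeon_disable_vblank_kms() above follow a small recipe: validate the crtc index, flip the per-crtc interrupt flag under rdev->irq.lock, and reprogram the hardware via radeon_irq_set() while still holding the lock, so the flag and the register state cannot diverge. A userspace model of the same shape, with a pthread mutex standing in for spin_lock_irqsave() and invented names throughout:

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdio.h>

    #define NUM_CRTC 6

    static pthread_mutex_t irq_lock = PTHREAD_MUTEX_INITIALIZER;
    static bool crtc_vblank_int[NUM_CRTC];

    static void hw_irq_set(void) { /* would reprogram interrupt registers */ }

    static int set_vblank(int crtc, bool enable)
    {
        if (crtc < 0 || crtc >= NUM_CRTC)
            return -1;                   /* reject a bad crtc, as the driver does */

        pthread_mutex_lock(&irq_lock);   /* spin_lock_irqsave() in the driver */
        crtc_vblank_int[crtc] = enable;
        hw_irq_set();                    /* flag change and HW update stay atomic */
        pthread_mutex_unlock(&irq_lock);
        return 0;
    }

    int main(void)
    {
        set_vblank(0, true);
        set_vblank(0, false);
        printf("ok\n");
        return 0;
    }

(Build with -pthread.)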
H A Dsi.c114 static u32 si_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh);
115 static void si_pcie_gen3_enable(struct radeon_device *rdev);
116 static void si_program_aspm(struct radeon_device *rdev);
117 extern void sumo_rlc_fini(struct radeon_device *rdev);
118 extern int sumo_rlc_init(struct radeon_device *rdev);
119 extern int r600_ih_ring_alloc(struct radeon_device *rdev);
120 extern void r600_ih_ring_fini(struct radeon_device *rdev);
121 extern void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev);
122 extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
123 extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
124 extern u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev);
125 extern void evergreen_print_gpu_status_regs(struct radeon_device *rdev);
126 extern bool evergreen_is_display_hung(struct radeon_device *rdev);
127 static void si_enable_gui_idle_interrupt(struct radeon_device *rdev,
129 static void si_init_pg(struct radeon_device *rdev);
130 static void si_init_cg(struct radeon_device *rdev);
131 static void si_fini_pg(struct radeon_device *rdev);
132 static void si_fini_cg(struct radeon_device *rdev);
133 static void si_rlc_stop(struct radeon_device *rdev);
1198 static void si_init_golden_registers(struct radeon_device *rdev) si_init_golden_registers() argument
1200 switch (rdev->family) { si_init_golden_registers()
1202 radeon_program_register_sequence(rdev, si_init_golden_registers()
1205 radeon_program_register_sequence(rdev, si_init_golden_registers()
1208 radeon_program_register_sequence(rdev, si_init_golden_registers()
1211 radeon_program_register_sequence(rdev, si_init_golden_registers()
1216 radeon_program_register_sequence(rdev, si_init_golden_registers()
1219 radeon_program_register_sequence(rdev, si_init_golden_registers()
1222 radeon_program_register_sequence(rdev, si_init_golden_registers()
1227 radeon_program_register_sequence(rdev, si_init_golden_registers()
1230 radeon_program_register_sequence(rdev, si_init_golden_registers()
1233 radeon_program_register_sequence(rdev, si_init_golden_registers()
1236 radeon_program_register_sequence(rdev, si_init_golden_registers()
1241 radeon_program_register_sequence(rdev, si_init_golden_registers()
1244 radeon_program_register_sequence(rdev, si_init_golden_registers()
1247 radeon_program_register_sequence(rdev, si_init_golden_registers()
1252 radeon_program_register_sequence(rdev, si_init_golden_registers()
1255 radeon_program_register_sequence(rdev, si_init_golden_registers()
1258 radeon_program_register_sequence(rdev, si_init_golden_registers()
1270 * @rdev: radeon_device pointer
1277 int si_get_allowed_info_register(struct radeon_device *rdev, si_get_allowed_info_register() argument
1303 * @rdev: radeon_device pointer
1308 u32 si_get_xclk(struct radeon_device *rdev) si_get_xclk() argument
1310 u32 reference_clock = rdev->clock.spll.reference_freq; si_get_xclk()
1325 int si_get_temp(struct radeon_device *rdev) si_get_temp() argument
1541 int si_mc_load_microcode(struct radeon_device *rdev) si_mc_load_microcode() argument
1550 if (!rdev->mc_fw) si_mc_load_microcode()
1553 if (rdev->new_fw) { si_mc_load_microcode()
1555 (const struct mc_firmware_header_v1_0 *)rdev->mc_fw->data; si_mc_load_microcode()
1560 (rdev->mc_fw->data + le32_to_cpu(hdr->io_debug_array_offset_bytes)); si_mc_load_microcode()
1563 (rdev->mc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); si_mc_load_microcode()
1565 ucode_size = rdev->mc_fw->size / 4; si_mc_load_microcode()
1567 switch (rdev->family) { si_mc_load_microcode()
1590 fw_data = (const __be32 *)rdev->mc_fw->data; si_mc_load_microcode()
1607 if (rdev->new_fw) { si_mc_load_microcode()
1617 if (rdev->new_fw) si_mc_load_microcode()
1629 for (i = 0; i < rdev->usec_timeout; i++) { si_mc_load_microcode()
1634 for (i = 0; i < rdev->usec_timeout; i++) { si_mc_load_microcode()
1647 static int si_init_microcode(struct radeon_device *rdev) si_init_microcode() argument
1659 switch (rdev->family) { si_init_microcode()
1719 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev); si_init_microcode()
1722 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev); si_init_microcode()
1725 if (rdev->pfp_fw->size != pfp_req_size) { si_init_microcode()
1728 rdev->pfp_fw->size, fw_name); si_init_microcode()
1733 err = radeon_ucode_validate(rdev->pfp_fw); si_init_microcode()
1745 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); si_init_microcode()
1748 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); si_init_microcode()
1751 if (rdev->me_fw->size != me_req_size) { si_init_microcode()
1754 rdev->me_fw->size, fw_name); si_init_microcode()
1758 err = radeon_ucode_validate(rdev->me_fw); si_init_microcode()
1770 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev); si_init_microcode()
1773 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev); si_init_microcode()
1776 if (rdev->ce_fw->size != ce_req_size) { si_init_microcode()
1779 rdev->ce_fw->size, fw_name); si_init_microcode()
1783 err = radeon_ucode_validate(rdev->ce_fw); si_init_microcode()
1795 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev); si_init_microcode()
1798 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev); si_init_microcode()
1801 if (rdev->rlc_fw->size != rlc_req_size) { si_init_microcode()
1804 rdev->rlc_fw->size, fw_name); si_init_microcode()
1808 err = radeon_ucode_validate(rdev->rlc_fw); si_init_microcode()
1820 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); si_init_microcode()
1823 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); si_init_microcode()
1826 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); si_init_microcode()
1830 if ((rdev->mc_fw->size != mc_req_size) && si_init_microcode()
1831 (rdev->mc_fw->size != mc2_req_size)) { si_init_microcode()
1834 rdev->mc_fw->size, fw_name); si_init_microcode()
1837 DRM_INFO("%s: %zu bytes\n", fw_name, rdev->mc_fw->size); si_init_microcode()
1839 err = radeon_ucode_validate(rdev->mc_fw); si_init_microcode()
1851 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev); si_init_microcode()
1854 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev); si_init_microcode()
1859 release_firmware(rdev->smc_fw); si_init_microcode()
1860 rdev->smc_fw = NULL; si_init_microcode()
1862 } else if (rdev->smc_fw->size != smc_req_size) { si_init_microcode()
1865 rdev->smc_fw->size, fw_name); si_init_microcode()
1869 err = radeon_ucode_validate(rdev->smc_fw); si_init_microcode()
1881 rdev->new_fw = false; si_init_microcode()
1886 rdev->new_fw = true; si_init_microcode()
1894 release_firmware(rdev->pfp_fw); si_init_microcode()
1895 rdev->pfp_fw = NULL; si_init_microcode()
1896 release_firmware(rdev->me_fw); si_init_microcode()
1897 rdev->me_fw = NULL; si_init_microcode()
1898 release_firmware(rdev->ce_fw); si_init_microcode()
1899 rdev->ce_fw = NULL; si_init_microcode()
1900 release_firmware(rdev->rlc_fw); si_init_microcode()
1901 rdev->rlc_fw = NULL; si_init_microcode()
1902 release_firmware(rdev->mc_fw); si_init_microcode()
1903 rdev->mc_fw = NULL; si_init_microcode()
1904 release_firmware(rdev->smc_fw); si_init_microcode()
1905 rdev->smc_fw = NULL; si_init_microcode()
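[Editor's note] si_init_microcode() repeats one pattern per blob: try the new-style firmware name, fall back to the legacy name, check the size, and on any failure release every blob loaded so far and NULL the pointers so later teardown is safe. A simplified userspace analogue, using fopen() in place of request_firmware() and invented file names:

    #include <stdio.h>

    /* Stand-in for a loaded firmware blob. */
    struct fw { FILE *f; };

    static int load_fw(struct fw *fw, const char *newname, const char *oldname)
    {
        fw->f = fopen(newname, "rb");    /* request_firmware(new-style name) */
        if (!fw->f)
            fw->f = fopen(oldname, "rb"); /* fall back to the legacy name */
        return fw->f ? 0 : -1;
    }

    static void release_fw(struct fw *fw)
    {
        if (fw->f)
            fclose(fw->f);
        fw->f = NULL;                    /* NULL out so double-release is safe */
    }

    int main(void)
    {
        struct fw pfp = {0}, me = {0};

        if (load_fw(&pfp, "si_pfp.bin", "PFP.bin"))
            goto out;
        if (load_fw(&me, "si_me.bin", "ME.bin"))
            goto out;
        printf("loaded\n");
        return 0;
    out:
        /* unwind everything on failure, as si_init_microcode() does */
        release_fw(&pfp);
        release_fw(&me);
        return 1;
    }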
1911 static u32 dce6_line_buffer_adjust(struct radeon_device *rdev, dce6_line_buffer_adjust() argument
1949 for (i = 0; i < rdev->usec_timeout; i++) { dce6_line_buffer_adjust()
1970 static u32 si_get_number_of_dram_channels(struct radeon_device *rdev) si_get_number_of_dram_channels() argument
2251 static void dce6_program_watermarks(struct radeon_device *rdev, dce6_program_watermarks() argument
2273 if (rdev->family == CHIP_ARUBA) dce6_program_watermarks()
2274 dram_channels = evergreen_get_number_of_dram_channels(rdev); dce6_program_watermarks()
2276 dram_channels = si_get_number_of_dram_channels(rdev); dce6_program_watermarks()
2279 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { dce6_program_watermarks()
2281 radeon_dpm_get_mclk(rdev, false) * 10; dce6_program_watermarks()
2283 radeon_dpm_get_sclk(rdev, false) * 10; dce6_program_watermarks()
2285 wm_high.yclk = rdev->pm.current_mclk * 10; dce6_program_watermarks()
2286 wm_high.sclk = rdev->pm.current_sclk * 10; dce6_program_watermarks()
2306 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { dce6_program_watermarks()
2308 radeon_dpm_get_mclk(rdev, true) * 10; dce6_program_watermarks()
2310 radeon_dpm_get_sclk(rdev, true) * 10; dce6_program_watermarks()
2312 wm_low.yclk = rdev->pm.current_mclk * 10; dce6_program_watermarks()
2313 wm_low.sclk = rdev->pm.current_sclk * 10; dce6_program_watermarks()
2342 (rdev->disp_priority == 2)) { dce6_program_watermarks()
2350 (rdev->disp_priority == 2)) { dce6_program_watermarks()
2414 void dce6_bandwidth_update(struct radeon_device *rdev) dce6_bandwidth_update() argument
2421 if (!rdev->mode_info.mode_config_initialized) dce6_bandwidth_update()
2424 radeon_update_display_priority(rdev); dce6_bandwidth_update()
2426 for (i = 0; i < rdev->num_crtc; i++) { dce6_bandwidth_update()
2427 if (rdev->mode_info.crtcs[i]->base.enabled) dce6_bandwidth_update()
2430 for (i = 0; i < rdev->num_crtc; i += 2) { dce6_bandwidth_update()
2431 mode0 = &rdev->mode_info.crtcs[i]->base.mode; dce6_bandwidth_update()
2432 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode; dce6_bandwidth_update()
2433 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1); dce6_bandwidth_update()
2434 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads); dce6_bandwidth_update()
2435 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0); dce6_bandwidth_update()
2436 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads); dce6_bandwidth_update()
2443 static void si_tiling_mode_table_init(struct radeon_device *rdev) si_tiling_mode_table_init() argument
2448 switch (rdev->config.si.mem_row_size_in_kb) { si_tiling_mode_table_init()
2461 if ((rdev->family == CHIP_TAHITI) || si_tiling_mode_table_init()
2462 (rdev->family == CHIP_PITCAIRN)) { si_tiling_mode_table_init()
2699 rdev->config.si.tile_mode_array[reg_offset] = gb_tile_moden; si_tiling_mode_table_init()
2702 } else if ((rdev->family == CHIP_VERDE) || si_tiling_mode_table_init()
2703 (rdev->family == CHIP_OLAND) || si_tiling_mode_table_init()
2704 (rdev->family == CHIP_HAINAN)) { si_tiling_mode_table_init()
2941 rdev->config.si.tile_mode_array[reg_offset] = gb_tile_moden; si_tiling_mode_table_init()
2945 DRM_ERROR("unknown asic: 0x%x\n", rdev->family); si_tiling_mode_table_init()
2948 static void si_select_se_sh(struct radeon_device *rdev, si_select_se_sh() argument
2975 static u32 si_get_cu_enabled(struct radeon_device *rdev, u32 cu_per_sh) si_get_cu_enabled() argument
2993 static void si_setup_spi(struct radeon_device *rdev, si_setup_spi() argument
3002 si_select_se_sh(rdev, i, j); si_setup_spi()
3004 active_cu = si_get_cu_enabled(rdev, cu_per_sh); si_setup_spi()
3017 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); si_setup_spi()
3020 static u32 si_get_rb_disabled(struct radeon_device *rdev, si_get_rb_disabled() argument
3040 static void si_setup_rb(struct radeon_device *rdev, si_setup_rb() argument
3051 si_select_se_sh(rdev, i, j); si_setup_rb()
3052 data = si_get_rb_disabled(rdev, max_rb_num_per_se, sh_per_se); si_setup_rb()
3056 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); si_setup_rb()
3065 rdev->config.si.backend_enable_mask = enabled_rbs; si_setup_rb()
3068 si_select_se_sh(rdev, i, 0xffffffff); si_setup_rb()
3087 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); si_setup_rb()
3090 static void si_gpu_init(struct radeon_device *rdev) si_gpu_init() argument
3099 switch (rdev->family) { si_gpu_init()
3101 rdev->config.si.max_shader_engines = 2; si_gpu_init()
3102 rdev->config.si.max_tile_pipes = 12; si_gpu_init()
3103 rdev->config.si.max_cu_per_sh = 8; si_gpu_init()
3104 rdev->config.si.max_sh_per_se = 2; si_gpu_init()
3105 rdev->config.si.max_backends_per_se = 4; si_gpu_init()
3106 rdev->config.si.max_texture_channel_caches = 12; si_gpu_init()
3107 rdev->config.si.max_gprs = 256; si_gpu_init()
3108 rdev->config.si.max_gs_threads = 32; si_gpu_init()
3109 rdev->config.si.max_hw_contexts = 8; si_gpu_init()
3111 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; si_gpu_init()
3112 rdev->config.si.sc_prim_fifo_size_backend = 0x100; si_gpu_init()
3113 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; si_gpu_init()
3114 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; si_gpu_init()
3118 rdev->config.si.max_shader_engines = 2; si_gpu_init()
3119 rdev->config.si.max_tile_pipes = 8; si_gpu_init()
3120 rdev->config.si.max_cu_per_sh = 5; si_gpu_init()
3121 rdev->config.si.max_sh_per_se = 2; si_gpu_init()
3122 rdev->config.si.max_backends_per_se = 4; si_gpu_init()
3123 rdev->config.si.max_texture_channel_caches = 8; si_gpu_init()
3124 rdev->config.si.max_gprs = 256; si_gpu_init()
3125 rdev->config.si.max_gs_threads = 32; si_gpu_init()
3126 rdev->config.si.max_hw_contexts = 8; si_gpu_init()
3128 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; si_gpu_init()
3129 rdev->config.si.sc_prim_fifo_size_backend = 0x100; si_gpu_init()
3130 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; si_gpu_init()
3131 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; si_gpu_init()
3136 rdev->config.si.max_shader_engines = 1; si_gpu_init()
3137 rdev->config.si.max_tile_pipes = 4; si_gpu_init()
3138 rdev->config.si.max_cu_per_sh = 5; si_gpu_init()
3139 rdev->config.si.max_sh_per_se = 2; si_gpu_init()
3140 rdev->config.si.max_backends_per_se = 4; si_gpu_init()
3141 rdev->config.si.max_texture_channel_caches = 4; si_gpu_init()
3142 rdev->config.si.max_gprs = 256; si_gpu_init()
3143 rdev->config.si.max_gs_threads = 32; si_gpu_init()
3144 rdev->config.si.max_hw_contexts = 8; si_gpu_init()
3146 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; si_gpu_init()
3147 rdev->config.si.sc_prim_fifo_size_backend = 0x40; si_gpu_init()
3148 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; si_gpu_init()
3149 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; si_gpu_init()
3153 rdev->config.si.max_shader_engines = 1; si_gpu_init()
3154 rdev->config.si.max_tile_pipes = 4; si_gpu_init()
3155 rdev->config.si.max_cu_per_sh = 6; si_gpu_init()
3156 rdev->config.si.max_sh_per_se = 1; si_gpu_init()
3157 rdev->config.si.max_backends_per_se = 2; si_gpu_init()
3158 rdev->config.si.max_texture_channel_caches = 4; si_gpu_init()
3159 rdev->config.si.max_gprs = 256; si_gpu_init()
3160 rdev->config.si.max_gs_threads = 16; si_gpu_init()
3161 rdev->config.si.max_hw_contexts = 8; si_gpu_init()
3163 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; si_gpu_init()
3164 rdev->config.si.sc_prim_fifo_size_backend = 0x40; si_gpu_init()
3165 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; si_gpu_init()
3166 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; si_gpu_init()
3170 rdev->config.si.max_shader_engines = 1; si_gpu_init()
3171 rdev->config.si.max_tile_pipes = 4; si_gpu_init()
3172 rdev->config.si.max_cu_per_sh = 5; si_gpu_init()
3173 rdev->config.si.max_sh_per_se = 1; si_gpu_init()
3174 rdev->config.si.max_backends_per_se = 1; si_gpu_init()
3175 rdev->config.si.max_texture_channel_caches = 2; si_gpu_init()
3176 rdev->config.si.max_gprs = 256; si_gpu_init()
3177 rdev->config.si.max_gs_threads = 16; si_gpu_init()
3178 rdev->config.si.max_hw_contexts = 8; si_gpu_init()
3180 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; si_gpu_init()
3181 rdev->config.si.sc_prim_fifo_size_backend = 0x40; si_gpu_init()
3182 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; si_gpu_init()
3183 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; si_gpu_init()
3201 evergreen_fix_pci_max_read_req_size(rdev); si_gpu_init()
3208 rdev->config.si.num_tile_pipes = rdev->config.si.max_tile_pipes; si_gpu_init()
3209 rdev->config.si.mem_max_burst_length_bytes = 256; si_gpu_init()
3211 rdev->config.si.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024; si_gpu_init()
3212 if (rdev->config.si.mem_row_size_in_kb > 4) si_gpu_init()
3213 rdev->config.si.mem_row_size_in_kb = 4; si_gpu_init()
3215 rdev->config.si.shader_engine_tile_size = 32; si_gpu_init()
3216 rdev->config.si.num_gpus = 1; si_gpu_init()
3217 rdev->config.si.multi_gpu_tile_size = 64; si_gpu_init()
3221 switch (rdev->config.si.mem_row_size_in_kb) { si_gpu_init()
3241 rdev->config.si.tile_config = 0; si_gpu_init()
3242 switch (rdev->config.si.num_tile_pipes) { si_gpu_init()
3244 rdev->config.si.tile_config |= (0 << 0); si_gpu_init()
3247 rdev->config.si.tile_config |= (1 << 0); si_gpu_init()
3250 rdev->config.si.tile_config |= (2 << 0); si_gpu_init()
3255 rdev->config.si.tile_config |= (3 << 0); si_gpu_init()
3260 rdev->config.si.tile_config |= 0 << 4; si_gpu_init()
3263 rdev->config.si.tile_config |= 1 << 4; si_gpu_init()
3267 rdev->config.si.tile_config |= 2 << 4; si_gpu_init()
3270 rdev->config.si.tile_config |= si_gpu_init()
3272 rdev->config.si.tile_config |= si_gpu_init()
3281 if (rdev->has_uvd) { si_gpu_init()
3287 si_tiling_mode_table_init(rdev); si_gpu_init()
3289 si_setup_rb(rdev, rdev->config.si.max_shader_engines, si_gpu_init()
3290 rdev->config.si.max_sh_per_se, si_gpu_init()
3291 rdev->config.si.max_backends_per_se); si_gpu_init()
3293 si_setup_spi(rdev, rdev->config.si.max_shader_engines, si_gpu_init()
3294 rdev->config.si.max_sh_per_se, si_gpu_init()
3295 rdev->config.si.max_cu_per_sh); si_gpu_init()
3297 rdev->config.si.active_cus = 0; si_gpu_init()
3298 for (i = 0; i < rdev->config.si.max_shader_engines; i++) { si_gpu_init()
3299 for (j = 0; j < rdev->config.si.max_sh_per_se; j++) { si_gpu_init()
3300 rdev->config.si.active_cus += si_gpu_init()
3301 hweight32(si_get_cu_active_bitmap(rdev, i, j)); si_gpu_init()
3315 WREG32(PA_SC_FIFO_SIZE, (SC_FRONTEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_frontend) | si_gpu_init()
3316 SC_BACKEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_backend) | si_gpu_init()
3317 SC_HIZ_TILE_FIFO_SIZE(rdev->config.si.sc_hiz_tile_fifo_size) | si_gpu_init()
3318 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.si.sc_earlyz_tile_fifo_size))); si_gpu_init()
3359 static void si_scratch_init(struct radeon_device *rdev) si_scratch_init() argument
3363 rdev->scratch.num_reg = 7; si_scratch_init()
3364 rdev->scratch.reg_base = SCRATCH_REG0; si_scratch_init()
3365 for (i = 0; i < rdev->scratch.num_reg; i++) { si_scratch_init()
3366 rdev->scratch.free[i] = true; si_scratch_init()
3367 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4); si_scratch_init()
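[Editor's note] si_scratch_init() above just seeds a tiny free-list: seven scratch registers at 4-byte strides from a base, each marked free; allocation and release are then linear scans over the free[] flags. A self-contained C99 model (the count and stride come from the hits; the base value and the get/put helpers are invented to show how the table is used):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define NUM_SCRATCH  7
    #define SCRATCH_BASE 0x8500u         /* placeholder register base */

    static bool     scratch_free[NUM_SCRATCH];
    static uint32_t scratch_reg[NUM_SCRATCH];

    static void scratch_init(void)
    {
        for (int i = 0; i < NUM_SCRATCH; i++) {
            scratch_free[i] = true;
            scratch_reg[i]  = SCRATCH_BASE + (uint32_t)(i * 4); /* 4-byte stride */
        }
    }

    static int scratch_get(uint32_t *reg)
    {
        for (int i = 0; i < NUM_SCRATCH; i++) {
            if (scratch_free[i]) {
                scratch_free[i] = false;
                *reg = scratch_reg[i];
                return 0;
            }
        }
        return -1;                       /* all registers in use */
    }

    static void scratch_put(uint32_t reg)
    {
        for (int i = 0; i < NUM_SCRATCH; i++)
            if (scratch_reg[i] == reg)
                scratch_free[i] = true;
    }

    int main(void)
    {
        uint32_t reg;
        scratch_init();
        if (scratch_get(&reg) == 0) {
            printf("got 0x%04x\n", reg);
            scratch_put(reg);
        }
        return 0;
    }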
3371 void si_fence_ring_emit(struct radeon_device *rdev, si_fence_ring_emit() argument
3374 struct radeon_ring *ring = &rdev->ring[fence->ring]; si_fence_ring_emit()
3375 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; si_fence_ring_emit()
3401 void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) si_ring_ib_execute() argument
3403 struct radeon_ring *ring = &rdev->ring[ib->ring]; si_ring_ib_execute()
3421 } else if (rdev->wb.enabled) { si_ring_ib_execute()
3461 static void si_cp_enable(struct radeon_device *rdev, bool enable) si_cp_enable() argument
3466 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) si_cp_enable()
3467 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size); si_cp_enable()
3470 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; si_cp_enable()
3471 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; si_cp_enable()
3472 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; si_cp_enable()
3477 static int si_cp_load_microcode(struct radeon_device *rdev) si_cp_load_microcode() argument
3481 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw) si_cp_load_microcode()
3484 si_cp_enable(rdev, false); si_cp_load_microcode()
3486 if (rdev->new_fw) { si_cp_load_microcode()
3488 (const struct gfx_firmware_header_v1_0 *)rdev->pfp_fw->data; si_cp_load_microcode()
3490 (const struct gfx_firmware_header_v1_0 *)rdev->ce_fw->data; si_cp_load_microcode()
3492 (const struct gfx_firmware_header_v1_0 *)rdev->me_fw->data; si_cp_load_microcode()
3502 (rdev->pfp_fw->data + le32_to_cpu(pfp_hdr->header.ucode_array_offset_bytes)); si_cp_load_microcode()
3511 (rdev->ce_fw->data + le32_to_cpu(ce_hdr->header.ucode_array_offset_bytes)); si_cp_load_microcode()
3520 (rdev->me_fw->data + le32_to_cpu(me_hdr->header.ucode_array_offset_bytes)); si_cp_load_microcode()
3530 fw_data = (const __be32 *)rdev->pfp_fw->data; si_cp_load_microcode()
3537 fw_data = (const __be32 *)rdev->ce_fw->data; si_cp_load_microcode()
3544 fw_data = (const __be32 *)rdev->me_fw->data; si_cp_load_microcode()
3558 static int si_cp_start(struct radeon_device *rdev) si_cp_start() argument
3560 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; si_cp_start()
3563 r = radeon_ring_lock(rdev, ring, 7 + 4); si_cp_start()
3572 radeon_ring_write(ring, rdev->config.si.max_hw_contexts - 1); si_cp_start()
3582 radeon_ring_unlock_commit(rdev, ring, false); si_cp_start()
3584 si_cp_enable(rdev, true); si_cp_start()
3586 r = radeon_ring_lock(rdev, ring, si_default_size + 10); si_cp_start()
3611 radeon_ring_unlock_commit(rdev, ring, false); si_cp_start()
3614 ring = &rdev->ring[i]; si_cp_start()
3615 r = radeon_ring_lock(rdev, ring, 2); si_cp_start()
3621 radeon_ring_unlock_commit(rdev, ring, false); si_cp_start()
3627 static void si_cp_fini(struct radeon_device *rdev) si_cp_fini() argument
3630 si_cp_enable(rdev, false); si_cp_fini()
3632 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; si_cp_fini()
3633 radeon_ring_fini(rdev, ring); si_cp_fini()
3634 radeon_scratch_free(rdev, ring->rptr_save_reg); si_cp_fini()
3636 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; si_cp_fini()
3637 radeon_ring_fini(rdev, ring); si_cp_fini()
3638 radeon_scratch_free(rdev, ring->rptr_save_reg); si_cp_fini()
3640 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; si_cp_fini()
3641 radeon_ring_fini(rdev, ring); si_cp_fini()
3642 radeon_scratch_free(rdev, ring->rptr_save_reg); si_cp_fini()
3645 static int si_cp_resume(struct radeon_device *rdev) si_cp_resume() argument
3652 si_enable_gui_idle_interrupt(rdev, false); si_cp_resume()
3661 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF); si_cp_resume()
3665 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; si_cp_resume()
3679 WREG32(CP_RB0_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC); si_cp_resume()
3680 WREG32(CP_RB0_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF); si_cp_resume()
3682 if (rdev->wb.enabled) si_cp_resume()
3696 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; si_cp_resume()
3710 WREG32(CP_RB1_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFFFFFFFC); si_cp_resume()
3711 WREG32(CP_RB1_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFF); si_cp_resume()
3720 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; si_cp_resume()
3734 WREG32(CP_RB2_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFFFFFFFC); si_cp_resume()
3735 WREG32(CP_RB2_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFF); si_cp_resume()
3743 si_cp_start(rdev); si_cp_resume()
3744 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true; si_cp_resume()
3745 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = true; si_cp_resume()
3746 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = true; si_cp_resume()
3747 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]); si_cp_resume()
3749 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; si_cp_resume()
3750 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; si_cp_resume()
3751 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; si_cp_resume()
3754 r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP1_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]); si_cp_resume()
3756 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; si_cp_resume()
3758 r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP2_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]); si_cp_resume()
3760 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; si_cp_resume()
3763 si_enable_gui_idle_interrupt(rdev, true); si_cp_resume()
3765 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) si_cp_resume()
3766 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size); si_cp_resume()
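[Editor's note] si_cp_resume() marks all three CP rings ready optimistically, then ring-tests each and demotes on failure: a GFX test failure clears all three ready flags, while a compute-ring failure clears only its own. Submission paths later just consult ring->ready. A small sketch of that test-and-demote pattern (ring indices and the test stub are invented):

    #include <stdbool.h>
    #include <stdio.h>

    #define NUM_RINGS 3                  /* 0 = GFX, 1..2 = compute, by analogy */

    static bool ring_ready[NUM_RINGS];

    static int ring_test(int i) { return i == 2 ? -1 : 0; } /* pretend ring 2 fails */

    static void cp_resume(void)
    {
        for (int i = 0; i < NUM_RINGS; i++)
            ring_ready[i] = true;        /* optimistic: mark ready first */

        if (ring_test(0)) {              /* a GFX failure demotes every ring */
            for (int i = 0; i < NUM_RINGS; i++)
                ring_ready[i] = false;
            return;
        }
        for (int i = 1; i < NUM_RINGS; i++)
            if (ring_test(i))
                ring_ready[i] = false;   /* compute rings fail individually */
    }

    int main(void)
    {
        cp_resume();
        for (int i = 0; i < NUM_RINGS; i++)
            printf("ring %d ready=%d\n", i, ring_ready[i]);
        return 0;
    }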
3771 u32 si_gpu_check_soft_reset(struct radeon_device *rdev) si_gpu_check_soft_reset() argument
3835 if (evergreen_is_display_hung(rdev)) si_gpu_check_soft_reset()
3852 static void si_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask) si_gpu_soft_reset() argument
3861 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask); si_gpu_soft_reset()
3863 evergreen_print_gpu_status_regs(rdev); si_gpu_soft_reset()
3864 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", si_gpu_soft_reset()
3866 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", si_gpu_soft_reset()
3870 si_fini_pg(rdev); si_gpu_soft_reset()
3871 si_fini_cg(rdev); si_gpu_soft_reset()
3874 si_rlc_stop(rdev); si_gpu_soft_reset()
3894 evergreen_mc_stop(rdev, &save); si_gpu_soft_reset()
3895 if (evergreen_mc_wait_for_idle(rdev)) { si_gpu_soft_reset()
3896 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); si_gpu_soft_reset()
3950 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp); si_gpu_soft_reset()
3964 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); si_gpu_soft_reset()
3978 evergreen_mc_resume(rdev, &save); si_gpu_soft_reset()
3981 evergreen_print_gpu_status_regs(rdev); si_gpu_soft_reset()
3984 static void si_set_clk_bypass_mode(struct radeon_device *rdev) si_set_clk_bypass_mode() argument
3996 for (i = 0; i < rdev->usec_timeout; i++) { si_set_clk_bypass_mode()
4011 static void si_spll_powerdown(struct radeon_device *rdev) si_spll_powerdown() argument
4032 static void si_gpu_pci_config_reset(struct radeon_device *rdev) si_gpu_pci_config_reset() argument
4037 dev_info(rdev->dev, "GPU pci config reset\n"); si_gpu_pci_config_reset()
4042 si_fini_pg(rdev); si_gpu_pci_config_reset()
4043 si_fini_cg(rdev); si_gpu_pci_config_reset()
4058 si_rlc_stop(rdev); si_gpu_pci_config_reset()
4063 evergreen_mc_stop(rdev, &save); si_gpu_pci_config_reset()
4064 if (evergreen_mc_wait_for_idle(rdev)) { si_gpu_pci_config_reset()
4065 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); si_gpu_pci_config_reset()
4069 si_set_clk_bypass_mode(rdev); si_gpu_pci_config_reset()
4071 si_spll_powerdown(rdev); si_gpu_pci_config_reset()
4073 pci_clear_master(rdev->pdev); si_gpu_pci_config_reset()
4075 radeon_pci_config_reset(rdev); si_gpu_pci_config_reset()
4077 for (i = 0; i < rdev->usec_timeout; i++) { si_gpu_pci_config_reset()
4084 int si_asic_reset(struct radeon_device *rdev) si_asic_reset() argument
4088 reset_mask = si_gpu_check_soft_reset(rdev); si_asic_reset()
4091 r600_set_bios_scratch_engine_hung(rdev, true); si_asic_reset()
4094 si_gpu_soft_reset(rdev, reset_mask); si_asic_reset()
4096 reset_mask = si_gpu_check_soft_reset(rdev); si_asic_reset()
4100 si_gpu_pci_config_reset(rdev); si_asic_reset()
4102 reset_mask = si_gpu_check_soft_reset(rdev); si_asic_reset()
4105 r600_set_bios_scratch_engine_hung(rdev, false); si_asic_reset()
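[Editor's note] si_asic_reset() escalates: check which blocks are hung, try the targeted soft reset first, re-check, and only fall back to the heavyweight PCI config reset if something is still stuck, clearing the BIOS "engine hung" scratch bit once the mask comes back clean. A compact model of that escalation; the check/soft/hard helpers are stubs invented here:

    #include <stdio.h>

    /* First call reports hung blocks, second call reports recovery. */
    static unsigned check_hung(void)   { static unsigned n; return n++ ? 0 : 0x3; }
    static void soft_reset(unsigned m) { printf("soft reset mask 0x%x\n", m); }
    static void hard_reset(void)       { printf("pci config reset\n"); }

    static int asic_reset(void)
    {
        unsigned mask = check_hung();
        if (!mask)
            return 0;                    /* nothing hung, nothing to do */

        soft_reset(mask);                /* cheap, targeted attempt first */
        mask = check_hung();
        if (mask)
            hard_reset();                /* escalate only if still hung */
        return 0;
    }

    int main(void) { return asic_reset(); }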
4113 * @rdev: radeon_device pointer
4119 bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) si_gfx_is_lockup() argument
4121 u32 reset_mask = si_gpu_check_soft_reset(rdev); si_gfx_is_lockup()
4126 radeon_ring_lockup_update(rdev, ring); si_gfx_is_lockup()
4129 return radeon_ring_test_lockup(rdev, ring); si_gfx_is_lockup()
4133 static void si_mc_program(struct radeon_device *rdev) si_mc_program() argument
4149 evergreen_mc_stop(rdev, &save); si_mc_program()
4150 if (radeon_mc_wait_for_idle(rdev)) { si_mc_program()
4151 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); si_mc_program()
4153 if (!ASIC_IS_NODCE(rdev)) si_mc_program()
4158 rdev->mc.vram_start >> 12); si_mc_program()
4160 rdev->mc.vram_end >> 12); si_mc_program()
4162 rdev->vram_scratch.gpu_addr >> 12); si_mc_program()
4163 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16; si_mc_program()
4164 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF); si_mc_program()
4167 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8)); si_mc_program()
4173 if (radeon_mc_wait_for_idle(rdev)) { si_mc_program()
4174 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); si_mc_program()
4176 evergreen_mc_resume(rdev, &save); si_mc_program()
4177 if (!ASIC_IS_NODCE(rdev)) { si_mc_program()
4180 rv515_vga_render_disable(rdev); si_mc_program()
4184 void si_vram_gtt_location(struct radeon_device *rdev, si_vram_gtt_location() argument
4189 dev_warn(rdev->dev, "limiting VRAM\n"); si_vram_gtt_location()
4193 radeon_vram_location(rdev, &rdev->mc, 0); si_vram_gtt_location()
4194 rdev->mc.gtt_base_align = 0; si_vram_gtt_location()
4195 radeon_gtt_location(rdev, mc); si_vram_gtt_location()
4198 static int si_mc_init(struct radeon_device *rdev) si_mc_init() argument
4204 rdev->mc.vram_is_ddr = true; si_mc_init()
4244 rdev->mc.vram_width = numchan * chansize; si_mc_init()
4246 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0); si_mc_init()
4247 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0); si_mc_init()
4256 rdev->mc.mc_vram_size = tmp * 1024ULL * 1024ULL; si_mc_init()
4257 rdev->mc.real_vram_size = rdev->mc.mc_vram_size; si_mc_init()
4258 rdev->mc.visible_vram_size = rdev->mc.aper_size; si_mc_init()
4259 si_vram_gtt_location(rdev, &rdev->mc); si_mc_init()
4260 radeon_update_bandwidth_info(rdev); si_mc_init()
4268 void si_pcie_gart_tlb_flush(struct radeon_device *rdev) si_pcie_gart_tlb_flush() argument
4277 static int si_pcie_gart_enable(struct radeon_device *rdev) si_pcie_gart_enable() argument
4281 if (rdev->gart.robj == NULL) { si_pcie_gart_enable()
4282 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n"); si_pcie_gart_enable()
4285 r = radeon_gart_table_vram_pin(rdev); si_pcie_gart_enable()
4308 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12); si_pcie_gart_enable()
4309 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12); si_pcie_gart_enable()
4310 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); si_pcie_gart_enable()
4312 (u32)(rdev->dummy_page.addr >> 12)); si_pcie_gart_enable()
4324 WREG32(VM_CONTEXT1_PAGE_TABLE_END_ADDR, rdev->vm_manager.max_pfn - 1); si_pcie_gart_enable()
4332 rdev->vm_manager.saved_table_addr[i]); si_pcie_gart_enable()
4335 rdev->vm_manager.saved_table_addr[i]); si_pcie_gart_enable()
4340 (u32)(rdev->dummy_page.addr >> 12)); si_pcie_gart_enable()
4357 si_pcie_gart_tlb_flush(rdev); si_pcie_gart_enable()
4359 (unsigned)(rdev->mc.gtt_size >> 20), si_pcie_gart_enable()
4360 (unsigned long long)rdev->gart.table_addr); si_pcie_gart_enable()
4361 rdev->gart.ready = true; si_pcie_gart_enable()
4365 static void si_pcie_gart_disable(struct radeon_device *rdev) si_pcie_gart_disable() argument
4375 rdev->vm_manager.saved_table_addr[i] = RREG32(reg); si_pcie_gart_disable()
4392 radeon_gart_table_vram_unpin(rdev); si_pcie_gart_disable()
4395 static void si_pcie_gart_fini(struct radeon_device *rdev) si_pcie_gart_fini() argument
4397 si_pcie_gart_disable(rdev); si_pcie_gart_fini()
4398 radeon_gart_table_vram_free(rdev); si_pcie_gart_fini()
4399 radeon_gart_fini(rdev); si_pcie_gart_fini()
4444 static int si_vm_packet3_ce_check(struct radeon_device *rdev, si_vm_packet3_ce_check() argument
4517 static int si_vm_packet3_gfx_check(struct radeon_device *rdev, si_vm_packet3_gfx_check() argument
4635 static int si_vm_packet3_compute_check(struct radeon_device *rdev, si_vm_packet3_compute_check() argument
4723 int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib) si_ib_parse() argument
4736 dev_err(rdev->dev, "Packet0 not allowed!\n"); si_ib_parse()
4745 ret = si_vm_packet3_ce_check(rdev, ib->ptr, &pkt); si_ib_parse()
4749 ret = si_vm_packet3_gfx_check(rdev, ib->ptr, &pkt); si_ib_parse()
4753 ret = si_vm_packet3_compute_check(rdev, ib->ptr, &pkt); si_ib_parse()
4756 dev_err(rdev->dev, "Non-PM4 ring %d!\n", ib->ring); si_ib_parse()
4764 dev_err(rdev->dev, "Unknown packet type %d!\n", pkt.type); si_ib_parse()
4785 int si_vm_init(struct radeon_device *rdev) si_vm_init() argument
4788 rdev->vm_manager.nvm = 16; si_vm_init()
4790 rdev->vm_manager.vram_base_offset = 0; si_vm_init()
4795 void si_vm_fini(struct radeon_device *rdev) si_vm_fini() argument
4802 * @rdev: radeon_device pointer
4808 static void si_vm_decode_fault(struct radeon_device *rdev, si_vm_decode_fault() argument
4816 if (rdev->family == CHIP_TAHITI) { si_vm_decode_fault()
5063 void si_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring, si_vm_flush() argument
5115 static void si_wait_for_rlc_serdes(struct radeon_device *rdev) si_wait_for_rlc_serdes() argument
5119 for (i = 0; i < rdev->usec_timeout; i++) { si_wait_for_rlc_serdes()
5125 for (i = 0; i < rdev->usec_timeout; i++) { si_wait_for_rlc_serdes()
5132 static void si_enable_gui_idle_interrupt(struct radeon_device *rdev, si_enable_gui_idle_interrupt() argument
5150 for (i = 0; i < rdev->usec_timeout; i++) { si_enable_gui_idle_interrupt()
5158 static void si_set_uvd_dcm(struct radeon_device *rdev, si_set_uvd_dcm() argument
5179 void si_init_uvd_internal_cg(struct radeon_device *rdev) si_init_uvd_internal_cg() argument
5184 si_set_uvd_dcm(rdev, false); si_init_uvd_internal_cg()
5192 static u32 si_halt_rlc(struct radeon_device *rdev) si_halt_rlc() argument
5202 si_wait_for_rlc_serdes(rdev); si_halt_rlc()
5208 static void si_update_rlc(struct radeon_device *rdev, u32 rlc) si_update_rlc() argument
5217 static void si_enable_dma_pg(struct radeon_device *rdev, bool enable) si_enable_dma_pg() argument
5222 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA)) si_enable_dma_pg()
5230 static void si_init_dma_pg(struct radeon_device *rdev) si_init_dma_pg() argument
5241 static void si_enable_gfx_cgpg(struct radeon_device *rdev, si_enable_gfx_cgpg() argument
5246 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG)) { si_enable_gfx_cgpg()
5266 static void si_init_gfx_cgpg(struct radeon_device *rdev) si_init_gfx_cgpg() argument
5270 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8); si_init_gfx_cgpg()
5276 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8); si_init_gfx_cgpg()
5286 static u32 si_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh) si_get_cu_active_bitmap() argument
5291 si_select_se_sh(rdev, se, sh); si_get_cu_active_bitmap()
5294 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); si_get_cu_active_bitmap()
5301 for (i = 0; i < rdev->config.si.max_cu_per_sh; i++) { si_get_cu_active_bitmap()
5309 static void si_init_ao_cu_mask(struct radeon_device *rdev) si_init_ao_cu_mask() argument
5315 for (i = 0; i < rdev->config.si.max_shader_engines; i++) { si_init_ao_cu_mask()
5316 for (j = 0; j < rdev->config.si.max_sh_per_se; j++) { si_init_ao_cu_mask()
5320 for (k = 0; k < rdev->config.si.max_cu_per_sh; k++) { si_init_ao_cu_mask()
5321 if (si_get_cu_active_bitmap(rdev, i, j) & mask) { si_init_ao_cu_mask()
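[Editor's note] si_gpu_init() and si_init_ao_cu_mask() both reduce a per-SE/SH bitmap of live compute units to a count with hweight32(), the kernel's 32-bit population count. A portable standalone equivalent:

    #include <stdio.h>

    /* Portable popcount, standing in for the kernel's hweight32(). */
    static unsigned hweight32(unsigned v)
    {
        unsigned n = 0;
        while (v) {
            v &= v - 1;                  /* clear the lowest set bit */
            n++;
        }
        return n;
    }

    int main(void)
    {
        unsigned bitmap = 0x1f;          /* e.g. 5 active CUs in one SH */
        printf("active CUs: %u\n", hweight32(bitmap));
        return 0;
    }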
5342 static void si_enable_cgcg(struct radeon_device *rdev, si_enable_cgcg() argument
5349 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGCG)) { si_enable_cgcg()
5350 si_enable_gui_idle_interrupt(rdev, true); si_enable_cgcg()
5354 tmp = si_halt_rlc(rdev); si_enable_cgcg()
5360 si_wait_for_rlc_serdes(rdev); si_enable_cgcg()
5362 si_update_rlc(rdev, tmp); si_enable_cgcg()
5368 si_enable_gui_idle_interrupt(rdev, false); si_enable_cgcg()
5382 static void si_enable_mgcg(struct radeon_device *rdev, si_enable_mgcg() argument
5387 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGCG)) { si_enable_mgcg()
5393 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CP_LS) { si_enable_mgcg()
5405 tmp = si_halt_rlc(rdev); si_enable_mgcg()
5411 si_update_rlc(rdev, tmp); si_enable_mgcg()
5428 tmp = si_halt_rlc(rdev); si_enable_mgcg()
5434 si_update_rlc(rdev, tmp); si_enable_mgcg()
5438 static void si_enable_uvd_mgcg(struct radeon_device *rdev, si_enable_uvd_mgcg() argument
5443 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_UVD_MGCG)) { si_enable_uvd_mgcg()
5483 static void si_enable_mc_ls(struct radeon_device *rdev, si_enable_mc_ls() argument
5491 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_LS)) si_enable_mc_ls()
5500 static void si_enable_mc_mgcg(struct radeon_device *rdev, si_enable_mc_mgcg() argument
5508 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_MGCG)) si_enable_mc_mgcg()
5517 static void si_enable_dma_mgcg(struct radeon_device *rdev, si_enable_dma_mgcg() argument
5523 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_SDMA_MGCG)) { si_enable_dma_mgcg()
5554 static void si_enable_bif_mgls(struct radeon_device *rdev, si_enable_bif_mgls() argument
5561 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_BIF_LS)) si_enable_bif_mgls()
5572 static void si_enable_hdp_mgcg(struct radeon_device *rdev, si_enable_hdp_mgcg() argument
5579 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_MGCG)) si_enable_hdp_mgcg()
5588 static void si_enable_hdp_ls(struct radeon_device *rdev, si_enable_hdp_ls() argument
5595 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_LS)) si_enable_hdp_ls()
5604 static void si_update_cg(struct radeon_device *rdev, si_update_cg() argument
5608 si_enable_gui_idle_interrupt(rdev, false); si_update_cg()
5611 si_enable_mgcg(rdev, true); si_update_cg()
5612 si_enable_cgcg(rdev, true); si_update_cg()
5614 si_enable_cgcg(rdev, false); si_update_cg()
5615 si_enable_mgcg(rdev, false); si_update_cg()
5617 si_enable_gui_idle_interrupt(rdev, true); si_update_cg()
5621 si_enable_mc_mgcg(rdev, enable); si_update_cg()
5622 si_enable_mc_ls(rdev, enable); si_update_cg()
5626 si_enable_dma_mgcg(rdev, enable); si_update_cg()
5630 si_enable_bif_mgls(rdev, enable); si_update_cg()
5634 if (rdev->has_uvd) { si_update_cg()
5635 si_enable_uvd_mgcg(rdev, enable); si_update_cg()
5640 si_enable_hdp_mgcg(rdev, enable); si_update_cg()
5641 si_enable_hdp_ls(rdev, enable); si_update_cg()
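[Editor's note] Note the ordering in the GFX branch of si_update_cg() above: on enable it turns on medium-grain gating before coarse-grain (si_enable_mgcg(), then si_enable_cgcg()), and on disable it tears them down in exactly the reverse order. That mirror-image bring-up/teardown is worth preserving in any refactor; a trivial model:

    #include <stdbool.h>
    #include <stdio.h>

    static void mgcg(bool on) { printf("mgcg %d\n", on); }
    static void cgcg(bool on) { printf("cgcg %d\n", on); }

    /* Enable order is the reverse of disable order, as in si_update_cg(). */
    static void update_gfx_cg(bool enable)
    {
        if (enable) {
            mgcg(true);
            cgcg(true);
        } else {
            cgcg(false);
            mgcg(false);
        }
    }

    int main(void) { update_gfx_cg(true); update_gfx_cg(false); return 0; }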
5645 static void si_init_cg(struct radeon_device *rdev) si_init_cg() argument
5647 si_update_cg(rdev, (RADEON_CG_BLOCK_GFX | si_init_cg()
5652 if (rdev->has_uvd) { si_init_cg()
5653 si_update_cg(rdev, RADEON_CG_BLOCK_UVD, true); si_init_cg()
5654 si_init_uvd_internal_cg(rdev); si_init_cg()
5658 static void si_fini_cg(struct radeon_device *rdev) si_fini_cg() argument
5660 if (rdev->has_uvd) { si_fini_cg()
5661 si_update_cg(rdev, RADEON_CG_BLOCK_UVD, false); si_fini_cg()
5663 si_update_cg(rdev, (RADEON_CG_BLOCK_GFX | si_fini_cg()
5670 u32 si_get_csb_size(struct radeon_device *rdev) si_get_csb_size() argument
5676 if (rdev->rlc.cs_data == NULL) si_get_csb_size()
5684 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) { si_get_csb_size()
5702 void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer) si_get_csb_buffer() argument
5708 if (rdev->rlc.cs_data == NULL) si_get_csb_buffer()
5720 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) { si_get_csb_buffer()
5736 switch (rdev->family) { si_get_csb_buffer()
5762 static void si_init_pg(struct radeon_device *rdev) si_init_pg() argument
5764 if (rdev->pg_flags) { si_init_pg()
5765 if (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA) { si_init_pg()
5766 si_init_dma_pg(rdev); si_init_pg()
5768 si_init_ao_cu_mask(rdev); si_init_pg()
5769 if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG) { si_init_pg()
5770 si_init_gfx_cgpg(rdev); si_init_pg()
5772 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8); si_init_pg()
5773 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8); si_init_pg()
5775 si_enable_dma_pg(rdev, true); si_init_pg()
5776 si_enable_gfx_cgpg(rdev, true); si_init_pg()
5778 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8); si_init_pg()
5779 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8); si_init_pg()
5783 static void si_fini_pg(struct radeon_device *rdev) si_fini_pg() argument
5785 if (rdev->pg_flags) { si_fini_pg()
5786 si_enable_dma_pg(rdev, false); si_fini_pg()
5787 si_enable_gfx_cgpg(rdev, false); si_fini_pg()
5794 void si_rlc_reset(struct radeon_device *rdev) si_rlc_reset() argument
5806 static void si_rlc_stop(struct radeon_device *rdev) si_rlc_stop() argument
5810 si_enable_gui_idle_interrupt(rdev, false); si_rlc_stop()
5812 si_wait_for_rlc_serdes(rdev); si_rlc_stop()
5815 static void si_rlc_start(struct radeon_device *rdev) si_rlc_start() argument
5819 si_enable_gui_idle_interrupt(rdev, true); si_rlc_start()
5824 static bool si_lbpw_supported(struct radeon_device *rdev) si_lbpw_supported() argument
5835 static void si_enable_lbpw(struct radeon_device *rdev, bool enable) si_enable_lbpw() argument
5847 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); si_enable_lbpw()
5852 static int si_rlc_resume(struct radeon_device *rdev) si_rlc_resume() argument
5856 if (!rdev->rlc_fw) si_rlc_resume()
5859 si_rlc_stop(rdev); si_rlc_resume()
5861 si_rlc_reset(rdev); si_rlc_resume()
5863 si_init_pg(rdev); si_rlc_resume()
5865 si_init_cg(rdev); si_rlc_resume()
5877 if (rdev->new_fw) { si_rlc_resume()
5879 (const struct rlc_firmware_header_v1_0 *)rdev->rlc_fw->data; si_rlc_resume()
5882 (rdev->rlc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); si_rlc_resume()
5892 (const __be32 *)rdev->rlc_fw->data; si_rlc_resume()
5900 si_enable_lbpw(rdev, si_lbpw_supported(rdev)); si_rlc_resume()
5902 si_rlc_start(rdev); si_rlc_resume()
5907 static void si_enable_interrupts(struct radeon_device *rdev) si_enable_interrupts() argument
5916 rdev->ih.enabled = true; si_enable_interrupts()
5919 static void si_disable_interrupts(struct radeon_device *rdev) si_disable_interrupts() argument
5931 rdev->ih.enabled = false; si_disable_interrupts()
5932 rdev->ih.rptr = 0; si_disable_interrupts()
5935 static void si_disable_interrupt_state(struct radeon_device *rdev) si_disable_interrupt_state() argument
5950 if (rdev->num_crtc >= 2) { si_disable_interrupt_state()
5954 if (rdev->num_crtc >= 4) { si_disable_interrupt_state()
5958 if (rdev->num_crtc >= 6) { si_disable_interrupt_state()
5963 if (rdev->num_crtc >= 2) { si_disable_interrupt_state()
5967 if (rdev->num_crtc >= 4) { si_disable_interrupt_state()
5971 if (rdev->num_crtc >= 6) { si_disable_interrupt_state()
5976 if (!ASIC_IS_NODCE(rdev)) { si_disable_interrupt_state()
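si_disable_interrupt_state() and si_irq_set() gate the third through sixth display controllers behind if (rdev->num_crtc >= 4) and >= 6 checks, since smaller parts expose only two CRTCs. The ladder is equivalent to a loop over per-CRTC register offsets; a sketch with made-up offsets and stride:

#include <stdio.h>

#define CRTC_REG_SPACING 0x800u /* hypothetical per-CRTC register stride */

static void program_crtc(unsigned crtc, unsigned value)
{
    unsigned reg = 0x6000u + crtc * CRTC_REG_SPACING; /* hypothetical base */
    printf("WREG32(0x%05x, 0x%x)  /* crtc %u */\n", reg, value, crtc);
}

int main(void)
{
    unsigned num_crtc = 4; /* 2, 4, or 6 on these parts */
    for (unsigned crtc = 0; crtc < num_crtc; crtc++)
        program_crtc(crtc, 0); /* e.g. mask that CRTC's vblank interrupt */
    return 0;
}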
5994 static int si_irq_init(struct radeon_device *rdev) si_irq_init() argument
6001 ret = r600_ih_ring_alloc(rdev); si_irq_init()
6006 si_disable_interrupts(rdev); si_irq_init()
6009 ret = si_rlc_resume(rdev); si_irq_init()
6011 r600_ih_ring_fini(rdev); si_irq_init()
6017 WREG32(INTERRUPT_CNTL2, rdev->ih.gpu_addr >> 8); si_irq_init()
6027 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8); si_irq_init()
6028 rb_bufsz = order_base_2(rdev->ih.ring_size / 4); si_irq_init()
6034 if (rdev->wb.enabled) si_irq_init()
6038 WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC); si_irq_init()
6039 WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF); si_irq_init()
6050 if (rdev->msi_enabled) si_irq_init()
6055 si_disable_interrupt_state(rdev); si_irq_init()
6057 pci_set_master(rdev->pdev); si_irq_init()
6060 si_enable_interrupts(rdev); si_irq_init()
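si_irq_init() sizes the IH ring with order_base_2(ring_size / 4): the ring holds 4-byte dwords and the hardware takes log2 of the entry count. It also splits the write-back wptr address into a dword-aligned low half and an 8-bit high half. A model of both computations (the write-back address below is hypothetical):

#include <stdint.h>
#include <stdio.h>

static unsigned order_base_2_model(unsigned long n) /* ceil(log2(n)) */
{
    unsigned order = 0;
    while ((1ul << order) < n)
        order++;
    return order;
}

int main(void)
{
    unsigned long ring_size = 64 * 1024;        /* bytes, as set up in si_init() */
    unsigned rb_bufsz = order_base_2_model(ring_size / 4);
    uint64_t wptr_addr = 0x0000000087654320ull; /* hypothetical wb.gpu_addr + offset */

    printf("rb_bufsz = %u (%lu entries)\n", rb_bufsz, ring_size / 4); /* 14, 16384 */
    printf("IH_RB_WPTR_ADDR_LO = 0x%08x\n", (uint32_t)(wptr_addr & 0xFFFFFFFCu));
    printf("IH_RB_WPTR_ADDR_HI = 0x%02x\n", (uint32_t)((wptr_addr >> 32) & 0xFF));
    return 0;
}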
6065 int si_irq_set(struct radeon_device *rdev) si_irq_set() argument
6075 if (!rdev->irq.installed) { si_irq_set()
6080 if (!rdev->ih.enabled) { si_irq_set()
6081 si_disable_interrupts(rdev); si_irq_set()
6083 si_disable_interrupt_state(rdev); si_irq_set()
6090 if (!ASIC_IS_NODCE(rdev)) { si_irq_set()
6106 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) { si_irq_set()
6110 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) { si_irq_set()
6114 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) { si_irq_set()
6118 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) { si_irq_set()
6123 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) { si_irq_set()
6127 if (rdev->irq.crtc_vblank_int[0] || si_irq_set()
6128 atomic_read(&rdev->irq.pflip[0])) { si_irq_set()
6132 if (rdev->irq.crtc_vblank_int[1] || si_irq_set()
6133 atomic_read(&rdev->irq.pflip[1])) { si_irq_set()
6137 if (rdev->irq.crtc_vblank_int[2] || si_irq_set()
6138 atomic_read(&rdev->irq.pflip[2])) { si_irq_set()
6142 if (rdev->irq.crtc_vblank_int[3] || si_irq_set()
6143 atomic_read(&rdev->irq.pflip[3])) { si_irq_set()
6147 if (rdev->irq.crtc_vblank_int[4] || si_irq_set()
6148 atomic_read(&rdev->irq.pflip[4])) { si_irq_set()
6152 if (rdev->irq.crtc_vblank_int[5] || si_irq_set()
6153 atomic_read(&rdev->irq.pflip[5])) { si_irq_set()
6157 if (rdev->irq.hpd[0]) { si_irq_set()
6161 if (rdev->irq.hpd[1]) { si_irq_set()
6165 if (rdev->irq.hpd[2]) { si_irq_set()
6169 if (rdev->irq.hpd[3]) { si_irq_set()
6173 if (rdev->irq.hpd[4]) { si_irq_set()
6177 if (rdev->irq.hpd[5]) { si_irq_set()
6191 if (rdev->irq.dpm_thermal) { si_irq_set()
6196 if (rdev->num_crtc >= 2) { si_irq_set()
6200 if (rdev->num_crtc >= 4) { si_irq_set()
6204 if (rdev->num_crtc >= 6) { si_irq_set()
6209 if (rdev->num_crtc >= 2) { si_irq_set()
6215 if (rdev->num_crtc >= 4) { si_irq_set()
6221 if (rdev->num_crtc >= 6) { si_irq_set()
6228 if (!ASIC_IS_NODCE(rdev)) { si_irq_set()
6245 static inline void si_irq_ack(struct radeon_device *rdev) si_irq_ack() argument
6249 if (ASIC_IS_NODCE(rdev)) si_irq_ack()
6252 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS); si_irq_ack()
6253 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE); si_irq_ack()
6254 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2); si_irq_ack()
6255 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3); si_irq_ack()
6256 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4); si_irq_ack()
6257 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5); si_irq_ack()
6258 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET); si_irq_ack()
6259 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET); si_irq_ack()
6260 if (rdev->num_crtc >= 4) { si_irq_ack()
6261 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET); si_irq_ack()
6262 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET); si_irq_ack()
6264 if (rdev->num_crtc >= 6) { si_irq_ack()
6265 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET); si_irq_ack()
6266 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET); si_irq_ack()
6269 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED) si_irq_ack()
6271 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED) si_irq_ack()
6273 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) si_irq_ack()
6275 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) si_irq_ack()
6277 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) si_irq_ack()
6279 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) si_irq_ack()
6282 if (rdev->num_crtc >= 4) { si_irq_ack()
6283 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED) si_irq_ack()
6285 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED) si_irq_ack()
6287 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) si_irq_ack()
6289 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) si_irq_ack()
6291 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) si_irq_ack()
6293 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) si_irq_ack()
6297 if (rdev->num_crtc >= 6) { si_irq_ack()
6298 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED) si_irq_ack()
6300 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED) si_irq_ack()
6302 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) si_irq_ack()
6304 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) si_irq_ack()
6306 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) si_irq_ack()
6308 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) si_irq_ack()
6312 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) { si_irq_ack()
6317 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) { si_irq_ack()
6322 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) { si_irq_ack()
6327 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) { si_irq_ack()
6332 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) { si_irq_ack()
6337 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) { si_irq_ack()
6343 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) { si_irq_ack()
6348 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) { si_irq_ack()
6353 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) { si_irq_ack()
6358 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) { si_irq_ack()
6363 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) { si_irq_ack()
6368 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) { si_irq_ack()
6375 static void si_irq_disable(struct radeon_device *rdev) si_irq_disable() argument
6377 si_disable_interrupts(rdev); si_irq_disable()
6380 si_irq_ack(rdev); si_irq_disable()
6381 si_disable_interrupt_state(rdev); si_irq_disable()
6384 static void si_irq_suspend(struct radeon_device *rdev) si_irq_suspend() argument
6386 si_irq_disable(rdev); si_irq_suspend()
6387 si_rlc_stop(rdev); si_irq_suspend()
6390 static void si_irq_fini(struct radeon_device *rdev) si_irq_fini() argument
6392 si_irq_suspend(rdev); si_irq_fini()
6393 r600_ih_ring_fini(rdev); si_irq_fini()
6396 static inline u32 si_get_ih_wptr(struct radeon_device *rdev) si_get_ih_wptr() argument
6400 if (rdev->wb.enabled) si_get_ih_wptr()
6401 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]); si_get_ih_wptr()
6411 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n", si_get_ih_wptr()
6412 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask); si_get_ih_wptr()
6413 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask; si_get_ih_wptr()
6418 return (wptr & rdev->ih.ptr_mask); si_get_ih_wptr()
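When si_get_ih_wptr() sees the overflow bit, it warns and moves the software rptr to (wptr + 16) & ptr_mask: one 16-byte entry past the newest write, i.e. the oldest entry that is still intact. A model of that recovery arithmetic, assuming ptr_mask is ring_size - 1:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint32_t ring_size = 64 * 1024;
    uint32_t ptr_mask = ring_size - 1; /* byte offsets wrap at the ring size */
    uint32_t wptr = 0xFFF0;            /* hardware wrapped past the reader */
    uint32_t rptr = 0x0100;            /* stale software read pointer */

    /* Skip the region the hardware just overwrote: resume one
     * 16-byte IH entry beyond the current write pointer. */
    rptr = (wptr + 16) & ptr_mask;
    printf("recovered rptr = 0x%04x\n", rptr); /* wraps to 0x0000 here */
    return 0;
}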
6431 int si_irq_process(struct radeon_device *rdev) si_irq_process() argument
6442 if (!rdev->ih.enabled || rdev->shutdown) si_irq_process()
6445 wptr = si_get_ih_wptr(rdev); si_irq_process()
6449 if (atomic_xchg(&rdev->ih.lock, 1)) si_irq_process()
6452 rptr = rdev->ih.rptr; si_irq_process()
6459 si_irq_ack(rdev); si_irq_process()
6464 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff; si_irq_process()
6465 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff; si_irq_process()
6466 ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff; si_irq_process()
6472 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)) si_irq_process()
6475 if (rdev->irq.crtc_vblank_int[0]) { si_irq_process()
6476 drm_handle_vblank(rdev->ddev, 0); si_irq_process()
6477 rdev->pm.vblank_sync = true; si_irq_process()
6478 wake_up(&rdev->irq.vblank_queue); si_irq_process()
6480 if (atomic_read(&rdev->irq.pflip[0])) si_irq_process()
6481 radeon_crtc_handle_vblank(rdev, 0); si_irq_process()
6482 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT; si_irq_process()
6487 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)) si_irq_process()
6490 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT; si_irq_process()
6502 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)) si_irq_process()
6505 if (rdev->irq.crtc_vblank_int[1]) { si_irq_process()
6506 drm_handle_vblank(rdev->ddev, 1); si_irq_process()
6507 rdev->pm.vblank_sync = true; si_irq_process()
6508 wake_up(&rdev->irq.vblank_queue); si_irq_process()
6510 if (atomic_read(&rdev->irq.pflip[1])) si_irq_process()
6511 radeon_crtc_handle_vblank(rdev, 1); si_irq_process()
6512 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT; si_irq_process()
6517 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)) si_irq_process()
6520 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT; si_irq_process()
6532 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)) si_irq_process()
6535 if (rdev->irq.crtc_vblank_int[2]) { si_irq_process()
6536 drm_handle_vblank(rdev->ddev, 2); si_irq_process()
6537 rdev->pm.vblank_sync = true; si_irq_process()
6538 wake_up(&rdev->irq.vblank_queue); si_irq_process()
6540 if (atomic_read(&rdev->irq.pflip[2])) si_irq_process()
6541 radeon_crtc_handle_vblank(rdev, 2); si_irq_process()
6542 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT; si_irq_process()
6547 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)) si_irq_process()
6550 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT; si_irq_process()
6562 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)) si_irq_process()
6565 if (rdev->irq.crtc_vblank_int[3]) { si_irq_process()
6566 drm_handle_vblank(rdev->ddev, 3); si_irq_process()
6567 rdev->pm.vblank_sync = true; si_irq_process()
6568 wake_up(&rdev->irq.vblank_queue); si_irq_process()
6570 if (atomic_read(&rdev->irq.pflip[3])) si_irq_process()
6571 radeon_crtc_handle_vblank(rdev, 3); si_irq_process()
6572 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT; si_irq_process()
6577 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)) si_irq_process()
6580 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT; si_irq_process()
6592 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)) si_irq_process()
6595 if (rdev->irq.crtc_vblank_int[4]) { si_irq_process()
6596 drm_handle_vblank(rdev->ddev, 4); si_irq_process()
6597 rdev->pm.vblank_sync = true; si_irq_process()
6598 wake_up(&rdev->irq.vblank_queue); si_irq_process()
6600 if (atomic_read(&rdev->irq.pflip[4])) si_irq_process()
6601 radeon_crtc_handle_vblank(rdev, 4); si_irq_process()
6602 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT; si_irq_process()
6607 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)) si_irq_process()
6610 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT; si_irq_process()
6622 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)) si_irq_process()
6625 if (rdev->irq.crtc_vblank_int[5]) { si_irq_process()
6626 drm_handle_vblank(rdev->ddev, 5); si_irq_process()
6627 rdev->pm.vblank_sync = true; si_irq_process()
6628 wake_up(&rdev->irq.vblank_queue); si_irq_process()
6630 if (atomic_read(&rdev->irq.pflip[5])) si_irq_process()
6631 radeon_crtc_handle_vblank(rdev, 5); si_irq_process()
6632 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT; si_irq_process()
6637 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)) si_irq_process()
6640 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT; si_irq_process()
6657 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1); si_irq_process()
6662 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT)) si_irq_process()
6665 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT; si_irq_process()
6671 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT)) si_irq_process()
6674 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT; si_irq_process()
6680 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT)) si_irq_process()
6683 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT; si_irq_process()
6689 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT)) si_irq_process()
6692 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT; si_irq_process()
6698 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT)) si_irq_process()
6701 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT; si_irq_process()
6707 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT)) si_irq_process()
6710 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT; si_irq_process()
6716 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT)) si_irq_process()
6719 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT; si_irq_process()
6725 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT)) si_irq_process()
6728 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT; si_irq_process()
6734 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT)) si_irq_process()
6737 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT; si_irq_process()
6743 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT)) si_irq_process()
6746 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT; si_irq_process()
6752 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT)) si_irq_process()
6755 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT; si_irq_process()
6761 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT)) si_irq_process()
6764 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT; si_irq_process()
6780 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX); si_irq_process()
6790 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data); si_irq_process()
6791 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", si_irq_process()
6793 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", si_irq_process()
6795 si_vm_decode_fault(rdev, status, addr); si_irq_process()
6798 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); si_irq_process()
6801 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX); si_irq_process()
6804 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX); si_irq_process()
6810 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); si_irq_process()
6813 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX); si_irq_process()
6816 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX); si_irq_process()
6822 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX); si_irq_process()
6826 rdev->pm.dpm.thermal.high_to_low = false; si_irq_process()
6831 rdev->pm.dpm.thermal.high_to_low = true; si_irq_process()
6839 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX); si_irq_process()
6848 rptr &= rdev->ih.ptr_mask; si_irq_process()
6852 schedule_work(&rdev->dp_work); si_irq_process()
6854 schedule_delayed_work(&rdev->hotplug_work, 0); si_irq_process()
6855 if (queue_thermal && rdev->pm.dpm_enabled) si_irq_process()
6856 schedule_work(&rdev->pm.dpm.thermal.work); si_irq_process()
6857 rdev->ih.rptr = rptr; si_irq_process()
6858 atomic_set(&rdev->ih.lock, 0); si_irq_process()
6861 wptr = si_get_ih_wptr(rdev); si_irq_process()
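si_irq_process() consumes 16-byte IH entries: dword 0 carries the source id in its low 8 bits, dword 1 a 28-bit source data field, dword 2 the ring id, and the read index advances 16 bytes at a time modulo the ring size. A minimal decode loop over a fake ring, using the same field masks as the matches above:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* Two fake 16-byte IH entries (4 dwords each) in a 64-byte ring;
     * the host is assumed little-endian, so le32_to_cpu() is a no-op. */
    uint32_t ring[16] = {
         1, 0, 0, 0,  /* src_id 1, src_data 0 (a D1 vblank/vline event) */
        42, 3, 1, 0,  /* src_id 42, src_data 3, ring_id 1 */
    };
    uint32_t ptr_mask = sizeof(ring) - 1; /* power-of-two ring assumed */
    uint32_t rptr = 0, wptr = 32;         /* two valid entries */

    while (rptr != wptr) {
        uint32_t i = rptr / 4; /* dword index of the entry */
        uint32_t src_id   = ring[i + 0] & 0xff;
        uint32_t src_data = ring[i + 1] & 0xfffffff;
        uint32_t ring_id  = ring[i + 2] & 0xff;

        printf("src_id=%u src_data=%u ring_id=%u\n", src_id, src_data, ring_id);
        rptr = (rptr + 16) & ptr_mask;
    }
    return 0;
}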
6871 static int si_startup(struct radeon_device *rdev) si_startup() argument
6877 si_pcie_gen3_enable(rdev); si_startup()
6879 si_program_aspm(rdev); si_startup()
6882 r = r600_vram_scratch_init(rdev); si_startup()
6886 si_mc_program(rdev); si_startup()
6888 if (!rdev->pm.dpm_enabled) { si_startup()
6889 r = si_mc_load_microcode(rdev); si_startup()
6896 r = si_pcie_gart_enable(rdev); si_startup()
6899 si_gpu_init(rdev); si_startup()
6902 if (rdev->family == CHIP_VERDE) { si_startup()
6903 rdev->rlc.reg_list = verde_rlc_save_restore_register_list; si_startup()
6904 rdev->rlc.reg_list_size = si_startup()
6907 rdev->rlc.cs_data = si_cs_data; si_startup()
6908 r = sumo_rlc_init(rdev); si_startup()
6915 r = radeon_wb_init(rdev); si_startup()
6919 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); si_startup()
6921 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); si_startup()
6925 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX); si_startup()
6927 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); si_startup()
6931 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX); si_startup()
6933 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); si_startup()
6937 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX); si_startup()
6939 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); si_startup()
6943 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_DMA1_INDEX); si_startup()
6945 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); si_startup()
6949 if (rdev->has_uvd) { si_startup()
6950 r = uvd_v2_2_resume(rdev); si_startup()
6952 r = radeon_fence_driver_start_ring(rdev, si_startup()
6955 dev_err(rdev->dev, "UVD fences init error (%d).\n", r); si_startup()
6958 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; si_startup()
6961 r = radeon_vce_resume(rdev); si_startup()
6963 r = vce_v1_0_resume(rdev); si_startup()
6965 r = radeon_fence_driver_start_ring(rdev, si_startup()
6968 r = radeon_fence_driver_start_ring(rdev, si_startup()
6972 dev_err(rdev->dev, "VCE init error (%d).\n", r); si_startup()
6973 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0; si_startup()
6974 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0; si_startup()
6978 if (!rdev->irq.installed) { si_startup()
6979 r = radeon_irq_kms_init(rdev); si_startup()
6984 r = si_irq_init(rdev); si_startup()
6987 radeon_irq_kms_fini(rdev); si_startup()
6990 si_irq_set(rdev); si_startup()
6992 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; si_startup()
6993 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, si_startup()
6998 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; si_startup()
6999 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP1_RPTR_OFFSET, si_startup()
7004 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; si_startup()
7005 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP2_RPTR_OFFSET, si_startup()
7010 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; si_startup()
7011 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET, si_startup()
7016 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; si_startup()
7017 r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET, si_startup()
7022 r = si_cp_load_microcode(rdev); si_startup()
7025 r = si_cp_resume(rdev); si_startup()
7029 r = cayman_dma_resume(rdev); si_startup()
7033 if (rdev->has_uvd) { si_startup()
7034 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; si_startup()
7036 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, si_startup()
7039 r = uvd_v1_0_init(rdev); si_startup()
7047 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX]; si_startup()
7049 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, si_startup()
7052 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX]; si_startup()
7054 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, si_startup()
7058 r = vce_v1_0_init(rdev); si_startup()
7062 r = radeon_ib_pool_init(rdev); si_startup()
7064 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); si_startup()
7068 r = radeon_vm_manager_init(rdev); si_startup()
7070 dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r); si_startup()
7074 r = radeon_audio_init(rdev); si_startup()
7081 int si_resume(struct radeon_device *rdev) si_resume() argument
7090 atom_asic_init(rdev->mode_info.atom_context); si_resume()
7093 si_init_golden_registers(rdev); si_resume()
7095 if (rdev->pm.pm_method == PM_METHOD_DPM) si_resume()
7096 radeon_pm_resume(rdev); si_resume()
7098 rdev->accel_working = true; si_resume()
7099 r = si_startup(rdev); si_resume()
7102 rdev->accel_working = false; si_resume()
7110 int si_suspend(struct radeon_device *rdev) si_suspend() argument
7112 radeon_pm_suspend(rdev); si_suspend()
7113 radeon_audio_fini(rdev); si_suspend()
7114 radeon_vm_manager_fini(rdev); si_suspend()
7115 si_cp_enable(rdev, false); si_suspend()
7116 cayman_dma_stop(rdev); si_suspend()
7117 if (rdev->has_uvd) { si_suspend()
7118 uvd_v1_0_fini(rdev); si_suspend()
7119 radeon_uvd_suspend(rdev); si_suspend()
7120 radeon_vce_suspend(rdev); si_suspend()
7122 si_fini_pg(rdev); si_suspend()
7123 si_fini_cg(rdev); si_suspend()
7124 si_irq_suspend(rdev); si_suspend()
7125 radeon_wb_disable(rdev); si_suspend()
7126 si_pcie_gart_disable(rdev); si_suspend()
7136 int si_init(struct radeon_device *rdev) si_init() argument
7138 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; si_init()
7142 if (!radeon_get_bios(rdev)) { si_init()
7143 if (ASIC_IS_AVIVO(rdev)) si_init()
7147 if (!rdev->is_atom_bios) { si_init()
7148 dev_err(rdev->dev, "Expecting atombios for cayman GPU\n"); si_init()
7151 r = radeon_atombios_init(rdev); si_init()
7156 if (!radeon_card_posted(rdev)) { si_init()
7157 if (!rdev->bios) { si_init()
7158 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n"); si_init()
7162 atom_asic_init(rdev->mode_info.atom_context); si_init()
7165 si_init_golden_registers(rdev); si_init()
7167 si_scratch_init(rdev); si_init()
7169 radeon_surface_init(rdev); si_init()
7171 radeon_get_clock_info(rdev->ddev); si_init()
7174 r = radeon_fence_driver_init(rdev); si_init()
7179 r = si_mc_init(rdev); si_init()
7183 r = radeon_bo_init(rdev); si_init()
7187 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw || si_init()
7188 !rdev->rlc_fw || !rdev->mc_fw) { si_init()
7189 r = si_init_microcode(rdev); si_init()
7197 radeon_pm_init(rdev); si_init()
7199 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; si_init()
7201 r600_ring_init(rdev, ring, 1024 * 1024); si_init()
7203 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; si_init()
7205 r600_ring_init(rdev, ring, 1024 * 1024); si_init()
7207 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; si_init()
7209 r600_ring_init(rdev, ring, 1024 * 1024); si_init()
7211 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; si_init()
7213 r600_ring_init(rdev, ring, 64 * 1024); si_init()
7215 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; si_init()
7217 r600_ring_init(rdev, ring, 64 * 1024); si_init()
7219 if (rdev->has_uvd) { si_init()
7220 r = radeon_uvd_init(rdev); si_init()
7222 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; si_init()
7224 r600_ring_init(rdev, ring, 4096); si_init()
7228 r = radeon_vce_init(rdev); si_init()
7230 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX]; si_init()
7232 r600_ring_init(rdev, ring, 4096); si_init()
7234 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX]; si_init()
7236 r600_ring_init(rdev, ring, 4096); si_init()
7239 rdev->ih.ring_obj = NULL; si_init()
7240 r600_ih_ring_init(rdev, 64 * 1024); si_init()
7242 r = r600_pcie_gart_init(rdev); si_init()
7246 rdev->accel_working = true; si_init()
7247 r = si_startup(rdev); si_init()
7249 dev_err(rdev->dev, "disabling GPU acceleration\n"); si_init()
7250 si_cp_fini(rdev); si_init()
7251 cayman_dma_fini(rdev); si_init()
7252 si_irq_fini(rdev); si_init()
7253 sumo_rlc_fini(rdev); si_init()
7254 radeon_wb_fini(rdev); si_init()
7255 radeon_ib_pool_fini(rdev); si_init()
7256 radeon_vm_manager_fini(rdev); si_init()
7257 radeon_irq_kms_fini(rdev); si_init()
7258 si_pcie_gart_fini(rdev); si_init()
7259 rdev->accel_working = false; si_init()
7266 if (!rdev->mc_fw) { si_init()
7274 void si_fini(struct radeon_device *rdev) si_fini() argument
7276 radeon_pm_fini(rdev); si_fini()
7277 si_cp_fini(rdev); si_fini()
7278 cayman_dma_fini(rdev); si_fini()
7279 si_fini_pg(rdev); si_fini()
7280 si_fini_cg(rdev); si_fini()
7281 si_irq_fini(rdev); si_fini()
7282 sumo_rlc_fini(rdev); si_fini()
7283 radeon_wb_fini(rdev); si_fini()
7284 radeon_vm_manager_fini(rdev); si_fini()
7285 radeon_ib_pool_fini(rdev); si_fini()
7286 radeon_irq_kms_fini(rdev); si_fini()
7287 if (rdev->has_uvd) { si_fini()
7288 uvd_v1_0_fini(rdev); si_fini()
7289 radeon_uvd_fini(rdev); si_fini()
7290 radeon_vce_fini(rdev); si_fini()
7292 si_pcie_gart_fini(rdev); si_fini()
7293 r600_vram_scratch_fini(rdev); si_fini()
7294 radeon_gem_fini(rdev); si_fini()
7295 radeon_fence_driver_fini(rdev); si_fini()
7296 radeon_bo_fini(rdev); si_fini()
7297 radeon_atombios_fini(rdev); si_fini()
7298 kfree(rdev->bios); si_fini()
7299 rdev->bios = NULL; si_fini()
7305 * @rdev: radeon_device pointer
7310 uint64_t si_get_gpu_clock_counter(struct radeon_device *rdev) si_get_gpu_clock_counter() argument
7314 mutex_lock(&rdev->gpu_clock_mutex); si_get_gpu_clock_counter()
7318 mutex_unlock(&rdev->gpu_clock_mutex); si_get_gpu_clock_counter()
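si_get_gpu_clock_counter() latches and reads a 64-bit counter as two 32-bit registers under rdev->gpu_clock_mutex, so the LSB/MSB pair is guaranteed to come from the same capture. A model of the combine step (the register values are hypothetical):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* Values a capture would have latched into the LSB/MSB registers. */
    uint32_t lsb = 0x89abcdefu;
    uint32_t msb = 0x00000012u;

    /* The mutex in the driver is what keeps these two reads paired
     * with one capture; the math itself is just: */
    uint64_t clock = (uint64_t)lsb | ((uint64_t)msb << 32);
    printf("gpu clock counter = 0x%016llx\n", (unsigned long long)clock);
    return 0;
}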
7322 int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk) si_set_uvd_clocks() argument
7340 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000, si_set_uvd_clocks()
7360 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); si_set_uvd_clocks()
7397 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); si_set_uvd_clocks()
7411 static void si_pcie_gen3_enable(struct radeon_device *rdev) si_pcie_gen3_enable() argument
7413 struct pci_dev *root = rdev->pdev->bus->self; si_pcie_gen3_enable()
7419 if (pci_is_root_bus(rdev->pdev->bus)) si_pcie_gen3_enable()
7425 if (rdev->flags & RADEON_IS_IGP) si_pcie_gen3_enable()
7428 if (!(rdev->flags & RADEON_IS_PCIE)) si_pcie_gen3_enable()
7431 ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask); si_pcie_gen3_enable()
7459 gpu_pos = pci_pcie_cap(rdev->pdev); si_pcie_gen3_enable()
7471 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg); si_pcie_gen3_enable()
7477 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16); si_pcie_gen3_enable()
7495 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_DEVSTA, &tmp16); si_pcie_gen3_enable()
7500 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg); si_pcie_gen3_enable()
7503 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &gpu_cfg2); si_pcie_gen3_enable()
7521 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &tmp16); si_pcie_gen3_enable()
7524 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16); si_pcie_gen3_enable()
7532 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16); si_pcie_gen3_enable()
7535 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16); si_pcie_gen3_enable()
7549 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16); si_pcie_gen3_enable()
7557 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16); si_pcie_gen3_enable()
7563 for (i = 0; i < rdev->usec_timeout; i++) { si_pcie_gen3_enable()
7571 static void si_program_aspm(struct radeon_device *rdev) si_program_aspm() argument
7580 if (!(rdev->flags & RADEON_IS_PCIE)) si_program_aspm()
7638 if ((rdev->family != CHIP_OLAND) && (rdev->family != CHIP_HAINAN)) { si_program_aspm()
7687 if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN)) si_program_aspm()
7694 if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN)) si_program_aspm()
7700 !pci_is_root_bus(rdev->pdev->bus)) { si_program_aspm()
7701 struct pci_dev *root = rdev->pdev->bus->self; si_program_aspm()
7776 int si_vce_send_vcepll_ctlreq(struct radeon_device *rdev) si_vce_send_vcepll_ctlreq() argument
7807 int si_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk) si_set_vce_clocks() argument
7828 r = radeon_uvd_calc_upll_dividers(rdev, evclk, ecclk, 125000, 250000, si_set_vce_clocks()
7851 r = si_vce_send_vcepll_ctlreq(rdev); si_set_vce_clocks()
7883 r = si_vce_send_vcepll_ctlreq(rdev); si_set_vce_clocks()
H A Dcik.c
119 extern int r600_ih_ring_alloc(struct radeon_device *rdev);
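cik_didt_rreg()/cik_pciep_rreg() further down and their write twins reach registers through an index/data pair, so the index write and the data access must be atomic against other users; that is what the spin_lock_irqsave() around each access buys. A userspace model with a pthread mutex standing in for the spinlock and an array standing in for the banked registers:

#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t banked_regs[256];  /* the registers behind the window */
static uint32_t index_reg;         /* the INDEX register */
static pthread_mutex_t idx_lock = PTHREAD_MUTEX_INITIALIZER;

static uint32_t indexed_rreg(uint32_t reg)
{
    uint32_t v;

    pthread_mutex_lock(&idx_lock); /* spin_lock_irqsave() in the driver */
    index_reg = reg;               /* WREG32(..._INDEX, reg) */
    v = banked_regs[index_reg];    /* RREG32(..._DATA) */
    pthread_mutex_unlock(&idx_lock);
    return v;
}

static void indexed_wreg(uint32_t reg, uint32_t v)
{
    pthread_mutex_lock(&idx_lock);
    index_reg = reg;
    banked_regs[index_reg] = v;
    pthread_mutex_unlock(&idx_lock);
}

int main(void)
{
    indexed_wreg(7, 0xcafe);
    printf("reg 7 = 0x%x\n", indexed_rreg(7));
    return 0;
}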
120 extern void r600_ih_ring_fini(struct radeon_device *rdev);
121 extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
122 extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
123 extern bool evergreen_is_display_hung(struct radeon_device *rdev);
124 extern void sumo_rlc_fini(struct radeon_device *rdev);
125 extern int sumo_rlc_init(struct radeon_device *rdev);
126 extern void si_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
127 extern void si_rlc_reset(struct radeon_device *rdev);
128 extern void si_init_uvd_internal_cg(struct radeon_device *rdev);
129 static u32 cik_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh);
130 extern int cik_sdma_resume(struct radeon_device *rdev);
131 extern void cik_sdma_enable(struct radeon_device *rdev, bool enable);
132 extern void cik_sdma_fini(struct radeon_device *rdev);
133 extern void vce_v2_0_enable_mgcg(struct radeon_device *rdev, bool enable);
134 static void cik_rlc_stop(struct radeon_device *rdev);
135 static void cik_pcie_gen3_enable(struct radeon_device *rdev);
136 static void cik_program_aspm(struct radeon_device *rdev);
137 static void cik_init_pg(struct radeon_device *rdev);
138 static void cik_init_cg(struct radeon_device *rdev);
139 static void cik_fini_pg(struct radeon_device *rdev);
140 static void cik_fini_cg(struct radeon_device *rdev);
141 static void cik_enable_gui_idle_interrupt(struct radeon_device *rdev,
147 * @rdev: radeon_device pointer
154 int cik_get_allowed_info_register(struct radeon_device *rdev, cik_get_allowed_info_register() argument
180 u32 cik_didt_rreg(struct radeon_device *rdev, u32 reg) cik_didt_rreg() argument
185 spin_lock_irqsave(&rdev->didt_idx_lock, flags); cik_didt_rreg()
188 spin_unlock_irqrestore(&rdev->didt_idx_lock, flags); cik_didt_rreg()
192 void cik_didt_wreg(struct radeon_device *rdev, u32 reg, u32 v) cik_didt_wreg() argument
196 spin_lock_irqsave(&rdev->didt_idx_lock, flags); cik_didt_wreg()
199 spin_unlock_irqrestore(&rdev->didt_idx_lock, flags); cik_didt_wreg()
203 int ci_get_temp(struct radeon_device *rdev) ci_get_temp() argument
222 int kv_get_temp(struct radeon_device *rdev) kv_get_temp() argument
242 u32 cik_pciep_rreg(struct radeon_device *rdev, u32 reg) cik_pciep_rreg() argument
247 spin_lock_irqsave(&rdev->pciep_idx_lock, flags); cik_pciep_rreg()
251 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags); cik_pciep_rreg()
255 void cik_pciep_wreg(struct radeon_device *rdev, u32 reg, u32 v) cik_pciep_wreg() argument
259 spin_lock_irqsave(&rdev->pciep_idx_lock, flags); cik_pciep_wreg()
264 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags); cik_pciep_wreg()
1624 static void cik_init_golden_registers(struct radeon_device *rdev) cik_init_golden_registers() argument
1627 mutex_lock(&rdev->grbm_idx_mutex); cik_init_golden_registers()
1628 switch (rdev->family) { cik_init_golden_registers()
1630 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1633 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1636 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1639 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1644 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1647 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1650 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1653 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1658 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1661 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1664 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1667 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1672 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1675 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1678 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1681 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1686 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1689 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1692 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1695 radeon_program_register_sequence(rdev, cik_init_golden_registers()
1702 mutex_unlock(&rdev->grbm_idx_mutex); cik_init_golden_registers()
1708 * @rdev: radeon_device pointer
1713 u32 cik_get_xclk(struct radeon_device *rdev) cik_get_xclk() argument
1715 u32 reference_clock = rdev->clock.spll.reference_freq; cik_get_xclk()
1717 if (rdev->flags & RADEON_IS_IGP) { cik_get_xclk()
1730 * @rdev: radeon_device pointer
1736 u32 cik_mm_rdoorbell(struct radeon_device *rdev, u32 index) cik_mm_rdoorbell() argument
1738 if (index < rdev->doorbell.num_doorbells) { cik_mm_rdoorbell()
1739 return readl(rdev->doorbell.ptr + index); cik_mm_rdoorbell()
1749 * @rdev: radeon_device pointer
1756 void cik_mm_wdoorbell(struct radeon_device *rdev, u32 index, u32 v) cik_mm_wdoorbell() argument
1758 if (index < rdev->doorbell.num_doorbells) { cik_mm_wdoorbell()
1759 writel(v, rdev->doorbell.ptr + index); cik_mm_wdoorbell()
1839 * @rdev: radeon_device pointer
1849 static void cik_srbm_select(struct radeon_device *rdev, cik_srbm_select() argument
1863 * @rdev: radeon_device pointer
1868 int ci_mc_load_microcode(struct radeon_device *rdev) ci_mc_load_microcode() argument
1877 if (!rdev->mc_fw) ci_mc_load_microcode()
1880 if (rdev->new_fw) { ci_mc_load_microcode()
1882 (const struct mc_firmware_header_v1_0 *)rdev->mc_fw->data; ci_mc_load_microcode()
1888 (rdev->mc_fw->data + le32_to_cpu(hdr->io_debug_array_offset_bytes)); ci_mc_load_microcode()
1891 (rdev->mc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); ci_mc_load_microcode()
1893 ucode_size = rdev->mc_fw->size / 4; ci_mc_load_microcode()
1895 switch (rdev->family) { ci_mc_load_microcode()
1907 fw_data = (const __be32 *)rdev->mc_fw->data; ci_mc_load_microcode()
1924 if (rdev->new_fw) { ci_mc_load_microcode()
1934 if ((rdev->pdev->device == 0x6649) && ((tmp & 0xff00) == 0x5600)) { ci_mc_load_microcode()
1943 if (rdev->new_fw) ci_mc_load_microcode()
1955 for (i = 0; i < rdev->usec_timeout; i++) { ci_mc_load_microcode()
1960 for (i = 0; i < rdev->usec_timeout; i++) { ci_mc_load_microcode()
1976 * @rdev: radeon_device pointer
1982 static int cik_init_microcode(struct radeon_device *rdev) cik_init_microcode() argument
1996 switch (rdev->family) { cik_init_microcode()
2064 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev); cik_init_microcode()
2067 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev); cik_init_microcode()
2070 if (rdev->pfp_fw->size != pfp_req_size) { cik_init_microcode()
2073 rdev->pfp_fw->size, fw_name); cik_init_microcode()
2078 err = radeon_ucode_validate(rdev->pfp_fw); cik_init_microcode()
2090 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); cik_init_microcode()
2093 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); cik_init_microcode()
2096 if (rdev->me_fw->size != me_req_size) { cik_init_microcode()
2099 rdev->me_fw->size, fw_name); cik_init_microcode()
2103 err = radeon_ucode_validate(rdev->me_fw); cik_init_microcode()
2115 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev); cik_init_microcode()
2118 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev); cik_init_microcode()
2121 if (rdev->ce_fw->size != ce_req_size) { cik_init_microcode()
2124 rdev->ce_fw->size, fw_name); cik_init_microcode()
2128 err = radeon_ucode_validate(rdev->ce_fw); cik_init_microcode()
2140 err = request_firmware(&rdev->mec_fw, fw_name, rdev->dev); cik_init_microcode()
2143 err = request_firmware(&rdev->mec_fw, fw_name, rdev->dev); cik_init_microcode()
2146 if (rdev->mec_fw->size != mec_req_size) { cik_init_microcode()
2149 rdev->mec_fw->size, fw_name); cik_init_microcode()
2153 err = radeon_ucode_validate(rdev->mec_fw); cik_init_microcode()
2164 if (rdev->family == CHIP_KAVERI) { cik_init_microcode()
2166 err = request_firmware(&rdev->mec2_fw, fw_name, rdev->dev); cik_init_microcode()
2170 err = radeon_ucode_validate(rdev->mec2_fw); cik_init_microcode()
2180 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev); cik_init_microcode()
2183 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev); cik_init_microcode()
2186 if (rdev->rlc_fw->size != rlc_req_size) { cik_init_microcode()
2189 rdev->rlc_fw->size, fw_name); cik_init_microcode()
2193 err = radeon_ucode_validate(rdev->rlc_fw); cik_init_microcode()
2205 err = request_firmware(&rdev->sdma_fw, fw_name, rdev->dev); cik_init_microcode()
2208 err = request_firmware(&rdev->sdma_fw, fw_name, rdev->dev); cik_init_microcode()
2211 if (rdev->sdma_fw->size != sdma_req_size) { cik_init_microcode()
2214 rdev->sdma_fw->size, fw_name); cik_init_microcode()
2218 err = radeon_ucode_validate(rdev->sdma_fw); cik_init_microcode()
2230 if (!(rdev->flags & RADEON_IS_IGP)) { cik_init_microcode()
2232 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); cik_init_microcode()
2235 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); cik_init_microcode()
2238 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); cik_init_microcode()
2242 if ((rdev->mc_fw->size != mc_req_size) && cik_init_microcode()
2243 (rdev->mc_fw->size != mc2_req_size)){ cik_init_microcode()
2246 rdev->mc_fw->size, fw_name); cik_init_microcode()
2249 DRM_INFO("%s: %zu bytes\n", fw_name, rdev->mc_fw->size); cik_init_microcode()
2251 err = radeon_ucode_validate(rdev->mc_fw); cik_init_microcode()
2263 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev); cik_init_microcode()
2266 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev); cik_init_microcode()
2271 release_firmware(rdev->smc_fw); cik_init_microcode()
2272 rdev->smc_fw = NULL; cik_init_microcode()
2274 } else if (rdev->smc_fw->size != smc_req_size) { cik_init_microcode()
2277 rdev->smc_fw->size, fw_name); cik_init_microcode()
2281 err = radeon_ucode_validate(rdev->smc_fw); cik_init_microcode()
2294 rdev->new_fw = false; cik_init_microcode()
2299 rdev->new_fw = true; cik_init_microcode()
2308 release_firmware(rdev->pfp_fw); cik_init_microcode()
2309 rdev->pfp_fw = NULL; cik_init_microcode()
2310 release_firmware(rdev->me_fw); cik_init_microcode()
2311 rdev->me_fw = NULL; cik_init_microcode()
2312 release_firmware(rdev->ce_fw); cik_init_microcode()
2313 rdev->ce_fw = NULL; cik_init_microcode()
2314 release_firmware(rdev->mec_fw); cik_init_microcode()
2315 rdev->mec_fw = NULL; cik_init_microcode()
2316 release_firmware(rdev->mec2_fw); cik_init_microcode()
2317 rdev->mec2_fw = NULL; cik_init_microcode()
2318 release_firmware(rdev->rlc_fw); cik_init_microcode()
2319 rdev->rlc_fw = NULL; cik_init_microcode()
2320 release_firmware(rdev->sdma_fw); cik_init_microcode()
2321 rdev->sdma_fw = NULL; cik_init_microcode()
2322 release_firmware(rdev->mc_fw); cik_init_microcode()
2323 rdev->mc_fw = NULL; cik_init_microcode()
2324 release_firmware(rdev->smc_fw); cik_init_microcode()
2325 rdev->smc_fw = NULL; cik_init_microcode()
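The paired request_firmware() calls in cik_init_microcode() try each blob under one name and fall back to a second on failure, reject blobs whose size does not match the expected one, and on any error release everything grabbed so far. A userspace model of that try, fall back, validate, clean up shape using plain files (the names and expected size are made up):

#include <stdio.h>
#include <stdlib.h>

struct fw { unsigned char *data; long size; };

static int load_file(struct fw *fw, const char *name)
{
    FILE *f = fopen(name, "rb");
    if (!f)
        return -1;
    fseek(f, 0, SEEK_END);
    fw->size = ftell(f);
    rewind(f);
    fw->data = malloc(fw->size);
    if (!fw->data || fread(fw->data, 1, fw->size, f) != (size_t)fw->size) {
        free(fw->data);
        fclose(f);
        return -1;
    }
    fclose(f);
    return 0;
}

static void release_fw(struct fw *fw) /* release_firmware() analogue */
{
    free(fw->data);
    fw->data = NULL;
}

int main(void)
{
    struct fw fw = { 0, 0 };
    long expected = 8192; /* analogous to pfp_req_size */

    /* First-choice name, then a fallback name. */
    if (load_file(&fw, "fw_new.bin") && load_file(&fw, "fw_legacy.bin")) {
        fprintf(stderr, "no firmware found\n");
        return 1;
    }
    if (fw.size != expected) { /* the size gate, as in cik_init_microcode() */
        fprintf(stderr, "bad size %ld, want %ld\n", fw.size, expected);
        release_fw(&fw);
        return 1;
    }
    release_fw(&fw);
    return 0;
}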
2336 * @rdev: radeon_device pointer
2344 static void cik_tiling_mode_table_init(struct radeon_device *rdev) cik_tiling_mode_table_init() argument
2350 u32 num_rbs = rdev->config.cik.max_backends_per_se * cik_tiling_mode_table_init()
2351 rdev->config.cik.max_shader_engines; cik_tiling_mode_table_init()
2353 switch (rdev->config.cik.mem_row_size_in_kb) { cik_tiling_mode_table_init()
2366 num_pipe_configs = rdev->config.cik.max_tile_pipes; cik_tiling_mode_table_init()
2497 rdev->config.cik.tile_mode_array[reg_offset] = gb_tile_moden; cik_tiling_mode_table_init()
2590 rdev->config.cik.macrotile_mode_array[reg_offset] = gb_tile_moden; cik_tiling_mode_table_init()
2720 rdev->config.cik.tile_mode_array[reg_offset] = gb_tile_moden; cik_tiling_mode_table_init()
2813 rdev->config.cik.macrotile_mode_array[reg_offset] = gb_tile_moden; cik_tiling_mode_table_init()
2944 rdev->config.cik.tile_mode_array[reg_offset] = gb_tile_moden; cik_tiling_mode_table_init()
3074 rdev->config.cik.tile_mode_array[reg_offset] = gb_tile_moden; cik_tiling_mode_table_init()
3168 rdev->config.cik.macrotile_mode_array[reg_offset] = gb_tile_moden; cik_tiling_mode_table_init()
3298 rdev->config.cik.tile_mode_array[reg_offset] = gb_tile_moden; cik_tiling_mode_table_init()
3391 rdev->config.cik.macrotile_mode_array[reg_offset] = gb_tile_moden; cik_tiling_mode_table_init()
3401 * @rdev: radeon_device pointer
3409 static void cik_select_se_sh(struct radeon_device *rdev, cik_select_se_sh() argument
3447 * @rdev: radeon_device pointer
3455 static u32 cik_get_rb_disabled(struct radeon_device *rdev, cik_get_rb_disabled() argument
3478 * @rdev: radeon_device pointer
3485 static void cik_setup_rb(struct radeon_device *rdev, cik_setup_rb() argument
3494 mutex_lock(&rdev->grbm_idx_mutex); cik_setup_rb()
3497 cik_select_se_sh(rdev, i, j); cik_setup_rb()
3498 data = cik_get_rb_disabled(rdev, max_rb_num_per_se, sh_per_se); cik_setup_rb()
3499 if (rdev->family == CHIP_HAWAII) cik_setup_rb()
3505 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff); cik_setup_rb()
3506 mutex_unlock(&rdev->grbm_idx_mutex); cik_setup_rb()
3515 rdev->config.cik.backend_enable_mask = enabled_rbs; cik_setup_rb()
3517 mutex_lock(&rdev->grbm_idx_mutex); cik_setup_rb()
3519 cik_select_se_sh(rdev, i, 0xffffffff); cik_setup_rb()
3544 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff); cik_setup_rb()
3545 mutex_unlock(&rdev->grbm_idx_mutex); cik_setup_rb()
3551 * @rdev: radeon_device pointer
3556 static void cik_gpu_init(struct radeon_device *rdev) cik_gpu_init() argument
3564 switch (rdev->family) { cik_gpu_init()
3566 rdev->config.cik.max_shader_engines = 2; cik_gpu_init()
3567 rdev->config.cik.max_tile_pipes = 4; cik_gpu_init()
3568 rdev->config.cik.max_cu_per_sh = 7; cik_gpu_init()
3569 rdev->config.cik.max_sh_per_se = 1; cik_gpu_init()
3570 rdev->config.cik.max_backends_per_se = 2; cik_gpu_init()
3571 rdev->config.cik.max_texture_channel_caches = 4; cik_gpu_init()
3572 rdev->config.cik.max_gprs = 256; cik_gpu_init()
3573 rdev->config.cik.max_gs_threads = 32; cik_gpu_init()
3574 rdev->config.cik.max_hw_contexts = 8; cik_gpu_init()
3576 rdev->config.cik.sc_prim_fifo_size_frontend = 0x20; cik_gpu_init()
3577 rdev->config.cik.sc_prim_fifo_size_backend = 0x100; cik_gpu_init()
3578 rdev->config.cik.sc_hiz_tile_fifo_size = 0x30; cik_gpu_init()
3579 rdev->config.cik.sc_earlyz_tile_fifo_size = 0x130; cik_gpu_init()
3583 rdev->config.cik.max_shader_engines = 4; cik_gpu_init()
3584 rdev->config.cik.max_tile_pipes = 16; cik_gpu_init()
3585 rdev->config.cik.max_cu_per_sh = 11; cik_gpu_init()
3586 rdev->config.cik.max_sh_per_se = 1; cik_gpu_init()
3587 rdev->config.cik.max_backends_per_se = 4; cik_gpu_init()
3588 rdev->config.cik.max_texture_channel_caches = 16; cik_gpu_init()
3589 rdev->config.cik.max_gprs = 256; cik_gpu_init()
3590 rdev->config.cik.max_gs_threads = 32; cik_gpu_init()
3591 rdev->config.cik.max_hw_contexts = 8; cik_gpu_init()
3593 rdev->config.cik.sc_prim_fifo_size_frontend = 0x20; cik_gpu_init()
3594 rdev->config.cik.sc_prim_fifo_size_backend = 0x100; cik_gpu_init()
3595 rdev->config.cik.sc_hiz_tile_fifo_size = 0x30; cik_gpu_init()
3596 rdev->config.cik.sc_earlyz_tile_fifo_size = 0x130; cik_gpu_init()
3600 rdev->config.cik.max_shader_engines = 1; cik_gpu_init()
3601 rdev->config.cik.max_tile_pipes = 4; cik_gpu_init()
3602 if ((rdev->pdev->device == 0x1304) || cik_gpu_init()
3603 (rdev->pdev->device == 0x1305) || cik_gpu_init()
3604 (rdev->pdev->device == 0x130C) || cik_gpu_init()
3605 (rdev->pdev->device == 0x130F) || cik_gpu_init()
3606 (rdev->pdev->device == 0x1310) || cik_gpu_init()
3607 (rdev->pdev->device == 0x1311) || cik_gpu_init()
3608 (rdev->pdev->device == 0x131C)) { cik_gpu_init()
3609 rdev->config.cik.max_cu_per_sh = 8; cik_gpu_init()
3610 rdev->config.cik.max_backends_per_se = 2; cik_gpu_init()
3611 } else if ((rdev->pdev->device == 0x1309) || cik_gpu_init()
3612 (rdev->pdev->device == 0x130A) || cik_gpu_init()
3613 (rdev->pdev->device == 0x130D) || cik_gpu_init()
3614 (rdev->pdev->device == 0x1313) || cik_gpu_init()
3615 (rdev->pdev->device == 0x131D)) { cik_gpu_init()
3616 rdev->config.cik.max_cu_per_sh = 6; cik_gpu_init()
3617 rdev->config.cik.max_backends_per_se = 2; cik_gpu_init()
3618 } else if ((rdev->pdev->device == 0x1306) || cik_gpu_init()
3619 (rdev->pdev->device == 0x1307) || cik_gpu_init()
3620 (rdev->pdev->device == 0x130B) || cik_gpu_init()
3621 (rdev->pdev->device == 0x130E) || cik_gpu_init()
3622 (rdev->pdev->device == 0x1315) || cik_gpu_init()
3623 (rdev->pdev->device == 0x1318) || cik_gpu_init()
3624 (rdev->pdev->device == 0x131B)) { cik_gpu_init()
3625 rdev->config.cik.max_cu_per_sh = 4; cik_gpu_init()
3626 rdev->config.cik.max_backends_per_se = 1; cik_gpu_init()
3628 rdev->config.cik.max_cu_per_sh = 3; cik_gpu_init()
3629 rdev->config.cik.max_backends_per_se = 1; cik_gpu_init()
3631 rdev->config.cik.max_sh_per_se = 1; cik_gpu_init()
3632 rdev->config.cik.max_texture_channel_caches = 4; cik_gpu_init()
3633 rdev->config.cik.max_gprs = 256; cik_gpu_init()
3634 rdev->config.cik.max_gs_threads = 16; cik_gpu_init()
3635 rdev->config.cik.max_hw_contexts = 8; cik_gpu_init()
3637 rdev->config.cik.sc_prim_fifo_size_frontend = 0x20; cik_gpu_init()
3638 rdev->config.cik.sc_prim_fifo_size_backend = 0x100; cik_gpu_init()
3639 rdev->config.cik.sc_hiz_tile_fifo_size = 0x30; cik_gpu_init()
3640 rdev->config.cik.sc_earlyz_tile_fifo_size = 0x130; cik_gpu_init()
3646 rdev->config.cik.max_shader_engines = 1; cik_gpu_init()
3647 rdev->config.cik.max_tile_pipes = 2; cik_gpu_init()
3648 rdev->config.cik.max_cu_per_sh = 2; cik_gpu_init()
3649 rdev->config.cik.max_sh_per_se = 1; cik_gpu_init()
3650 rdev->config.cik.max_backends_per_se = 1; cik_gpu_init()
3651 rdev->config.cik.max_texture_channel_caches = 2; cik_gpu_init()
3652 rdev->config.cik.max_gprs = 256; cik_gpu_init()
3653 rdev->config.cik.max_gs_threads = 16; cik_gpu_init()
3654 rdev->config.cik.max_hw_contexts = 8; cik_gpu_init()
3656 rdev->config.cik.sc_prim_fifo_size_frontend = 0x20; cik_gpu_init()
3657 rdev->config.cik.sc_prim_fifo_size_backend = 0x100; cik_gpu_init()
3658 rdev->config.cik.sc_hiz_tile_fifo_size = 0x30; cik_gpu_init()
3659 rdev->config.cik.sc_earlyz_tile_fifo_size = 0x130; cik_gpu_init()
3682 rdev->config.cik.num_tile_pipes = rdev->config.cik.max_tile_pipes; cik_gpu_init()
3683 rdev->config.cik.mem_max_burst_length_bytes = 256; cik_gpu_init()
3685 rdev->config.cik.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024; cik_gpu_init()
3686 if (rdev->config.cik.mem_row_size_in_kb > 4) cik_gpu_init()
3687 rdev->config.cik.mem_row_size_in_kb = 4; cik_gpu_init()
3689 rdev->config.cik.shader_engine_tile_size = 32; cik_gpu_init()
3690 rdev->config.cik.num_gpus = 1; cik_gpu_init()
3691 rdev->config.cik.multi_gpu_tile_size = 64; cik_gpu_init()
3695 switch (rdev->config.cik.mem_row_size_in_kb) { cik_gpu_init()
3715 rdev->config.cik.tile_config = 0; cik_gpu_init()
3716 switch (rdev->config.cik.num_tile_pipes) { cik_gpu_init()
3718 rdev->config.cik.tile_config |= (0 << 0); cik_gpu_init()
3721 rdev->config.cik.tile_config |= (1 << 0); cik_gpu_init()
3724 rdev->config.cik.tile_config |= (2 << 0); cik_gpu_init()
3729 rdev->config.cik.tile_config |= (3 << 0); cik_gpu_init()
3732 rdev->config.cik.tile_config |= cik_gpu_init()
3734 rdev->config.cik.tile_config |= cik_gpu_init()
3736 rdev->config.cik.tile_config |= cik_gpu_init()
3748 cik_tiling_mode_table_init(rdev); cik_gpu_init()
3750 cik_setup_rb(rdev, rdev->config.cik.max_shader_engines, cik_gpu_init()
3751 rdev->config.cik.max_sh_per_se, cik_gpu_init()
3752 rdev->config.cik.max_backends_per_se); cik_gpu_init()
3754 rdev->config.cik.active_cus = 0; cik_gpu_init()
3755 for (i = 0; i < rdev->config.cik.max_shader_engines; i++) { cik_gpu_init()
3756 for (j = 0; j < rdev->config.cik.max_sh_per_se; j++) { cik_gpu_init()
3757 rdev->config.cik.active_cus += cik_gpu_init()
3758 hweight32(cik_get_cu_active_bitmap(rdev, i, j)); cik_gpu_init()
3765 mutex_lock(&rdev->grbm_idx_mutex); cik_gpu_init()
3770 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff); cik_gpu_init()
3797 WREG32(PA_SC_FIFO_SIZE, (SC_FRONTEND_PRIM_FIFO_SIZE(rdev->config.cik.sc_prim_fifo_size_frontend) | cik_gpu_init()
3798 SC_BACKEND_PRIM_FIFO_SIZE(rdev->config.cik.sc_prim_fifo_size_backend) | cik_gpu_init()
3799 SC_HIZ_TILE_FIFO_SIZE(rdev->config.cik.sc_hiz_tile_fifo_size) | cik_gpu_init()
3800 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.cik.sc_earlyz_tile_fifo_size))); cik_gpu_init()
3826 mutex_unlock(&rdev->grbm_idx_mutex); cik_gpu_init()
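cik_gpu_init() totals active compute units by iterating every shader engine and shader array and popcounting the per-SH active-CU bitmap (hweight32() in the kernel). The same accumulation in miniature, with invented bitmaps:

#include <stdint.h>
#include <stdio.h>

static unsigned popcount32(uint32_t x) /* hweight32() in the kernel */
{
    unsigned n = 0;
    for (; x; x &= x - 1)
        n++;
    return n;
}

int main(void)
{
    enum { MAX_SE = 2, MAX_SH = 1 };
    /* Hypothetical active-CU bitmaps, one per (se, sh) pair. */
    uint32_t bitmap[MAX_SE][MAX_SH] = { { 0x7f }, { 0x3f } };
    unsigned active_cus = 0;

    for (int se = 0; se < MAX_SE; se++)
        for (int sh = 0; sh < MAX_SH; sh++)
            active_cus += popcount32(bitmap[se][sh]);
    printf("active CUs: %u\n", active_cus); /* 7 + 6 = 13 */
    return 0;
}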
3837 * @rdev: radeon_device pointer
3844 static void cik_scratch_init(struct radeon_device *rdev) cik_scratch_init() argument
3848 rdev->scratch.num_reg = 7; cik_scratch_init()
3849 rdev->scratch.reg_base = SCRATCH_REG0; cik_scratch_init()
3850 for (i = 0; i < rdev->scratch.num_reg; i++) { cik_scratch_init()
3851 rdev->scratch.free[i] = true; cik_scratch_init()
3852 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4); cik_scratch_init()
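cik_scratch_init() seeds a tiny allocator: seven scratch registers starting at SCRATCH_REG0, each 4 bytes apart, all initially free. A self-contained model of the get/put pair that cik_ring_test() below leans on (the base offset is hypothetical):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define NUM_SCRATCH  7
#define SCRATCH_BASE 0x30b0u /* hypothetical SCRATCH_REG0 offset */

static bool scratch_is_free[NUM_SCRATCH];

static void scratch_init(void)
{
    for (int i = 0; i < NUM_SCRATCH; i++)
        scratch_is_free[i] = true;
}

static int scratch_get(uint32_t *reg) /* radeon_scratch_get() analogue */
{
    for (int i = 0; i < NUM_SCRATCH; i++) {
        if (scratch_is_free[i]) {
            scratch_is_free[i] = false;
            *reg = SCRATCH_BASE + i * 4; /* registers are dword-spaced */
            return 0;
        }
    }
    return -1; /* all in use */
}

static void scratch_put(uint32_t reg) /* radeon_scratch_free() analogue */
{
    scratch_is_free[(reg - SCRATCH_BASE) / 4] = true;
}

int main(void)
{
    uint32_t reg;
    scratch_init();
    if (scratch_get(&reg) == 0) {
        printf("got scratch reg 0x%x\n", reg);
        scratch_put(reg);
    }
    return 0;
}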
3859 * @rdev: radeon_device pointer
3867 int cik_ring_test(struct radeon_device *rdev, struct radeon_ring *ring) cik_ring_test() argument
3874 r = radeon_scratch_get(rdev, &scratch); cik_ring_test()
3880 r = radeon_ring_lock(rdev, ring, 3); cik_ring_test()
3883 radeon_scratch_free(rdev, scratch); cik_ring_test()
3889 radeon_ring_unlock_commit(rdev, ring, false); cik_ring_test()
3891 for (i = 0; i < rdev->usec_timeout; i++) { cik_ring_test()
3897 if (i < rdev->usec_timeout) { cik_ring_test()
3904 radeon_scratch_free(rdev, scratch); cik_ring_test()
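cik_ring_test(), cik_ib_test(), and ci_mc_load_microcode() all share one idiom: kick the hardware, then poll a status location for up to rdev->usec_timeout iterations with a one-microsecond delay, and treat reaching the limit as failure. A compressed model of that loop; here the token is pre-written so the poll succeeds immediately:

#include <stdint.h>
#include <stdio.h>
#include <unistd.h>

static volatile uint32_t scratch; /* stands in for the scratch register */

int main(void)
{
    const unsigned usec_timeout = 100000;
    unsigned i;

    scratch = 0xDEADBEEF; /* pretend the GPU already wrote the token */

    for (i = 0; i < usec_timeout; i++) {
        if (scratch == 0xDEADBEEF)
            break;   /* the ring executed our packet */
        usleep(1);   /* udelay(1) in the driver */
    }
    if (i < usec_timeout)
        printf("ring test succeeded in %u usecs\n", i);
    else
        printf("ring test failed: scratch=0x%08x\n", scratch);
    return 0;
}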
3911 * @rdev: radeon_device pointer
3916 static void cik_hdp_flush_cp_ring_emit(struct radeon_device *rdev, cik_hdp_flush_cp_ring_emit() argument
3919 struct radeon_ring *ring = &rdev->ring[ridx]; cik_hdp_flush_cp_ring_emit()
3956 * @rdev: radeon_device pointer
3962 void cik_fence_gfx_ring_emit(struct radeon_device *rdev, cik_fence_gfx_ring_emit() argument
3965 struct radeon_ring *ring = &rdev->ring[fence->ring]; cik_fence_gfx_ring_emit()
3966 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; cik_fence_gfx_ring_emit()
3997 * @rdev: radeon_device pointer
4003 void cik_fence_compute_ring_emit(struct radeon_device *rdev, cik_fence_compute_ring_emit() argument
4006 struct radeon_ring *ring = &rdev->ring[fence->ring]; cik_fence_compute_ring_emit()
4007 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; cik_fence_compute_ring_emit()
4025 * @rdev: radeon_device pointer
4033 bool cik_semaphore_ring_emit(struct radeon_device *rdev, cik_semaphore_ring_emit() argument
4057 * @rdev: radeon_device pointer
4067 struct radeon_fence *cik_copy_cpdma(struct radeon_device *rdev, cik_copy_cpdma() argument
4074 int ring_index = rdev->asic->copy.blit_ring_index; cik_copy_cpdma()
4075 struct radeon_ring *ring = &rdev->ring[ring_index]; cik_copy_cpdma()
4084 r = radeon_ring_lock(rdev, ring, num_loops * 7 + 18); cik_copy_cpdma()
4087 radeon_sync_free(rdev, &sync, NULL); cik_copy_cpdma()
4091 radeon_sync_resv(rdev, &sync, resv, false); cik_copy_cpdma()
4092 radeon_sync_rings(rdev, &sync, ring->idx); cik_copy_cpdma()
4113 r = radeon_fence_emit(rdev, &fence, ring->idx); cik_copy_cpdma()
4115 radeon_ring_unlock_undo(rdev, ring); cik_copy_cpdma()
4116 radeon_sync_free(rdev, &sync, NULL); cik_copy_cpdma()
4120 radeon_ring_unlock_commit(rdev, ring, false); cik_copy_cpdma()
4121 radeon_sync_free(rdev, &sync, fence); cik_copy_cpdma()
4132 * @rdev: radeon_device pointer
4141 void cik_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) cik_ring_ib_execute() argument
4143 struct radeon_ring *ring = &rdev->ring[ib->ring]; cik_ring_ib_execute()
4161 } else if (rdev->wb.enabled) { cik_ring_ib_execute()
4184 * @rdev: radeon_device pointer
4191 int cik_ib_test(struct radeon_device *rdev, struct radeon_ring *ring) cik_ib_test() argument
4199 r = radeon_scratch_get(rdev, &scratch); cik_ib_test()
4205 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256); cik_ib_test()
4208 radeon_scratch_free(rdev, scratch); cik_ib_test()
4215 r = radeon_ib_schedule(rdev, &ib, NULL, false); cik_ib_test()
4217 radeon_scratch_free(rdev, scratch); cik_ib_test()
4218 radeon_ib_free(rdev, &ib); cik_ib_test()
4225 radeon_scratch_free(rdev, scratch); cik_ib_test()
4226 radeon_ib_free(rdev, &ib); cik_ib_test()
4229 for (i = 0; i < rdev->usec_timeout; i++) { cik_ib_test()
4235 if (i < rdev->usec_timeout) { cik_ib_test()
4242 radeon_scratch_free(rdev, scratch); cik_ib_test()
4243 radeon_ib_free(rdev, &ib); cik_ib_test()
4273 * @rdev: radeon_device pointer
4278 static void cik_cp_gfx_enable(struct radeon_device *rdev, bool enable) cik_cp_gfx_enable() argument
4283 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) cik_cp_gfx_enable()
4284 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size); cik_cp_gfx_enable()
4286 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; cik_cp_gfx_enable()
4294 * @rdev: radeon_device pointer
4299 static int cik_cp_gfx_load_microcode(struct radeon_device *rdev) cik_cp_gfx_load_microcode() argument
4303 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw) cik_cp_gfx_load_microcode()
4306 cik_cp_gfx_enable(rdev, false); cik_cp_gfx_load_microcode()
4308 if (rdev->new_fw) { cik_cp_gfx_load_microcode()
4310 (const struct gfx_firmware_header_v1_0 *)rdev->pfp_fw->data; cik_cp_gfx_load_microcode()
4312 (const struct gfx_firmware_header_v1_0 *)rdev->ce_fw->data; cik_cp_gfx_load_microcode()
4314 (const struct gfx_firmware_header_v1_0 *)rdev->me_fw->data; cik_cp_gfx_load_microcode()
4324 (rdev->pfp_fw->data + le32_to_cpu(pfp_hdr->header.ucode_array_offset_bytes)); cik_cp_gfx_load_microcode()
4333 (rdev->ce_fw->data + le32_to_cpu(ce_hdr->header.ucode_array_offset_bytes)); cik_cp_gfx_load_microcode()
4342 (rdev->me_fw->data + le32_to_cpu(me_hdr->header.ucode_array_offset_bytes)); cik_cp_gfx_load_microcode()
4353 fw_data = (const __be32 *)rdev->pfp_fw->data; cik_cp_gfx_load_microcode()
4360 fw_data = (const __be32 *)rdev->ce_fw->data; cik_cp_gfx_load_microcode()
4367 fw_data = (const __be32 *)rdev->me_fw->data; cik_cp_gfx_load_microcode()
4380 * @rdev: radeon_device pointer
4386 static int cik_cp_gfx_start(struct radeon_device *rdev) cik_cp_gfx_start() argument
4388 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; cik_cp_gfx_start()
4392 WREG32(CP_MAX_CONTEXT, rdev->config.cik.max_hw_contexts - 1); cik_cp_gfx_start()
4396 cik_cp_gfx_enable(rdev, true); cik_cp_gfx_start()
4398 r = radeon_ring_lock(rdev, ring, cik_default_size + 17); cik_cp_gfx_start()
4433 radeon_ring_unlock_commit(rdev, ring, false); cik_cp_gfx_start()
4441 * @rdev: radeon_device pointer
4446 static void cik_cp_gfx_fini(struct radeon_device *rdev) cik_cp_gfx_fini() argument
4448 cik_cp_gfx_enable(rdev, false); cik_cp_gfx_fini()
4449 radeon_ring_fini(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]); cik_cp_gfx_fini()
4455 * @rdev: radeon_device pointer
4461 static int cik_cp_gfx_resume(struct radeon_device *rdev) cik_cp_gfx_resume() argument
4470 if (rdev->family != CHIP_HAWAII) cik_cp_gfx_resume()
4479 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF); cik_cp_gfx_resume()
4483 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; cik_cp_gfx_resume()
4497 WREG32(CP_RB0_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC); cik_cp_gfx_resume()
4498 WREG32(CP_RB0_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF); cik_cp_gfx_resume()
4503 if (!rdev->wb.enabled) cik_cp_gfx_resume()
4514 cik_cp_gfx_start(rdev); cik_cp_gfx_resume()
4515 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true; cik_cp_gfx_resume()
4516 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]); cik_cp_gfx_resume()
4518 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; cik_cp_gfx_resume()
4522 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) cik_cp_gfx_resume()
4523 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size); cik_cp_gfx_resume()
4528 u32 cik_gfx_get_rptr(struct radeon_device *rdev, cik_gfx_get_rptr() argument
4533 if (rdev->wb.enabled) cik_gfx_get_rptr()
4534 rptr = rdev->wb.wb[ring->rptr_offs/4]; cik_gfx_get_rptr()
4541 u32 cik_gfx_get_wptr(struct radeon_device *rdev, cik_gfx_get_wptr() argument
4551 void cik_gfx_set_wptr(struct radeon_device *rdev, cik_gfx_set_wptr() argument
4558 u32 cik_compute_get_rptr(struct radeon_device *rdev, cik_compute_get_rptr() argument
4563 if (rdev->wb.enabled) { cik_compute_get_rptr()
4564 rptr = rdev->wb.wb[ring->rptr_offs/4]; cik_compute_get_rptr()
4566 mutex_lock(&rdev->srbm_mutex); cik_compute_get_rptr()
4567 cik_srbm_select(rdev, ring->me, ring->pipe, ring->queue, 0); cik_compute_get_rptr()
4569 cik_srbm_select(rdev, 0, 0, 0, 0); cik_compute_get_rptr()
4570 mutex_unlock(&rdev->srbm_mutex); cik_compute_get_rptr()
4576 u32 cik_compute_get_wptr(struct radeon_device *rdev, cik_compute_get_wptr() argument
4581 if (rdev->wb.enabled) { cik_compute_get_wptr()
4583 wptr = rdev->wb.wb[ring->wptr_offs/4]; cik_compute_get_wptr()
4585 mutex_lock(&rdev->srbm_mutex); cik_compute_get_wptr()
4586 cik_srbm_select(rdev, ring->me, ring->pipe, ring->queue, 0); cik_compute_get_wptr()
4588 cik_srbm_select(rdev, 0, 0, 0, 0); cik_compute_get_wptr()
4589 mutex_unlock(&rdev->srbm_mutex); cik_compute_get_wptr()
4595 void cik_compute_set_wptr(struct radeon_device *rdev, cik_compute_set_wptr() argument
4599 rdev->wb.wb[ring->wptr_offs/4] = ring->wptr; cik_compute_set_wptr()
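
The get_rptr/get_wptr hits above all share one pattern: when the write-back page is enabled, read the GPU-written snapshot from system memory instead of doing a slow MMIO register read. A standalone sketch of that lookup, with the register read stubbed and wb[] modelling rdev->wb.wb (offsets are in bytes, hence the /4):

/* Standalone sketch of the write-back-preferred pointer read. */
#include <stdio.h>
#include <stdint.h>

struct dev_model {
	int enabled;            /* rdev->wb.enabled */
	uint32_t wb[64];        /* rdev->wb.wb: GPU-written snapshot page */
};

static uint32_t mmio_read_rptr(void) { return 0x20; }   /* stub for RREG32(...) */

static uint32_t get_rptr(const struct dev_model *d, unsigned int rptr_offs)
{
	if (d->enabled)
		return d->wb[rptr_offs / 4];    /* cheap cached read, no MMIO */
	return mmio_read_rptr();                /* slow path: real register */
}

int main(void)
{
	struct dev_model d = { .enabled = 1 };
	d.wb[4] = 0x10;
	printf("rptr = 0x%x\n", (unsigned)get_rptr(&d, 16));
	return 0;
}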
4603 static void cik_compute_stop(struct radeon_device *rdev, cik_compute_stop() argument
4608 cik_srbm_select(rdev, ring->me, ring->pipe, ring->queue, 0); cik_compute_stop()
4616 for (j = 0; j < rdev->usec_timeout; j++) { cik_compute_stop()
4625 cik_srbm_select(rdev, 0, 0, 0, 0); cik_compute_stop()
4631 * @rdev: radeon_device pointer
4636 static void cik_cp_compute_enable(struct radeon_device *rdev, bool enable) cik_cp_compute_enable() argument
4645 mutex_lock(&rdev->srbm_mutex); cik_cp_compute_enable()
4646 cik_compute_stop(rdev, &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]); cik_cp_compute_enable()
4647 cik_compute_stop(rdev, &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]); cik_cp_compute_enable()
4648 mutex_unlock(&rdev->srbm_mutex); cik_cp_compute_enable()
4651 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; cik_cp_compute_enable()
4652 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; cik_cp_compute_enable()
4660 * @rdev: radeon_device pointer
4665 static int cik_cp_compute_load_microcode(struct radeon_device *rdev) cik_cp_compute_load_microcode() argument
4669 if (!rdev->mec_fw) cik_cp_compute_load_microcode()
4672 cik_cp_compute_enable(rdev, false); cik_cp_compute_load_microcode()
4674 if (rdev->new_fw) { cik_cp_compute_load_microcode()
4676 (const struct gfx_firmware_header_v1_0 *)rdev->mec_fw->data; cik_cp_compute_load_microcode()
4684 (rdev->mec_fw->data + le32_to_cpu(mec_hdr->header.ucode_array_offset_bytes)); cik_cp_compute_load_microcode()
4692 if (rdev->family == CHIP_KAVERI) { cik_cp_compute_load_microcode()
4694 (const struct gfx_firmware_header_v1_0 *)rdev->mec2_fw->data; cik_cp_compute_load_microcode()
4697 (rdev->mec2_fw->data + cik_cp_compute_load_microcode()
4709 fw_data = (const __be32 *)rdev->mec_fw->data; cik_cp_compute_load_microcode()
4715 if (rdev->family == CHIP_KAVERI) { cik_cp_compute_load_microcode()
4717 fw_data = (const __be32 *)rdev->mec_fw->data; cik_cp_compute_load_microcode()
4731 * @rdev: radeon_device pointer
4736 static int cik_cp_compute_start(struct radeon_device *rdev) cik_cp_compute_start() argument
4738 cik_cp_compute_enable(rdev, true); cik_cp_compute_start()
4746 * @rdev: radeon_device pointer
4751 static void cik_cp_compute_fini(struct radeon_device *rdev) cik_cp_compute_fini() argument
4755 cik_cp_compute_enable(rdev, false); cik_cp_compute_fini()
4763 if (rdev->ring[idx].mqd_obj) { cik_cp_compute_fini()
4764 r = radeon_bo_reserve(rdev->ring[idx].mqd_obj, false); cik_cp_compute_fini()
4766 dev_warn(rdev->dev, "(%d) reserve MQD bo failed\n", r); cik_cp_compute_fini()
4768 radeon_bo_unpin(rdev->ring[idx].mqd_obj); cik_cp_compute_fini()
4769 radeon_bo_unreserve(rdev->ring[idx].mqd_obj); cik_cp_compute_fini()
4771 radeon_bo_unref(&rdev->ring[idx].mqd_obj); cik_cp_compute_fini()
4772 rdev->ring[idx].mqd_obj = NULL; cik_cp_compute_fini()
4777 static void cik_mec_fini(struct radeon_device *rdev) cik_mec_fini() argument
4781 if (rdev->mec.hpd_eop_obj) { cik_mec_fini()
4782 r = radeon_bo_reserve(rdev->mec.hpd_eop_obj, false); cik_mec_fini()
4784 dev_warn(rdev->dev, "(%d) reserve HPD EOP bo failed\n", r); cik_mec_fini()
4785 radeon_bo_unpin(rdev->mec.hpd_eop_obj); cik_mec_fini()
4786 radeon_bo_unreserve(rdev->mec.hpd_eop_obj); cik_mec_fini()
4788 radeon_bo_unref(&rdev->mec.hpd_eop_obj); cik_mec_fini()
4789 rdev->mec.hpd_eop_obj = NULL; cik_mec_fini()
4795 static int cik_mec_init(struct radeon_device *rdev) cik_mec_init() argument
4806 rdev->mec.num_mec = 1; cik_mec_init()
4807 rdev->mec.num_pipe = 1; cik_mec_init()
4808 rdev->mec.num_queue = rdev->mec.num_mec * rdev->mec.num_pipe * 8; cik_mec_init()
4810 if (rdev->mec.hpd_eop_obj == NULL) { cik_mec_init()
4811 r = radeon_bo_create(rdev, cik_mec_init()
4812 rdev->mec.num_mec * rdev->mec.num_pipe * MEC_HPD_SIZE * 2, cik_mec_init()
4815 &rdev->mec.hpd_eop_obj); cik_mec_init()
4817 dev_warn(rdev->dev, "(%d) create HPD EOP bo failed\n", r); cik_mec_init()
4822 r = radeon_bo_reserve(rdev->mec.hpd_eop_obj, false); cik_mec_init()
4824 cik_mec_fini(rdev); cik_mec_init()
4827 r = radeon_bo_pin(rdev->mec.hpd_eop_obj, RADEON_GEM_DOMAIN_GTT, cik_mec_init()
4828 &rdev->mec.hpd_eop_gpu_addr); cik_mec_init()
4830 dev_warn(rdev->dev, "(%d) pin HPD EOP bo failed\n", r); cik_mec_init()
4831 cik_mec_fini(rdev); cik_mec_init()
4834 r = radeon_bo_kmap(rdev->mec.hpd_eop_obj, (void **)&hpd); cik_mec_init()
4836 dev_warn(rdev->dev, "(%d) map HPD EOP bo failed\n", r); cik_mec_init()
4837 cik_mec_fini(rdev); cik_mec_init()
4842 memset(hpd, 0, rdev->mec.num_mec * rdev->mec.num_pipe * MEC_HPD_SIZE * 2); cik_mec_init()
4844 radeon_bo_kunmap(rdev->mec.hpd_eop_obj); cik_mec_init()
4845 radeon_bo_unreserve(rdev->mec.hpd_eop_obj); cik_mec_init()
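
The cik_mec_init() hits above follow the usual buffer-object bring-up ladder — create, reserve, pin, kmap — with every failure unwinding through the single fini helper. A standalone sketch of that shape; the four bo_* calls are local stubs standing in for the real radeon_bo_* API:

/* Standalone sketch of the create/reserve/pin/kmap ladder with one unwind path. */
#include <stdio.h>

static int bo_create(void)  { return 0; }
static int bo_reserve(void) { return 0; }
static int bo_pin(void)     { return 0; }
static int bo_kmap(void)    { return 0; }
static void mec_fini(void)  { printf("unwound HPD EOP bo\n"); }

int main(void)
{
	int r;

	r = bo_create();
	if (r) { fprintf(stderr, "(%d) create HPD EOP bo failed\n", r); return r; }
	r = bo_reserve();
	if (r) { mec_fini(); return r; }
	r = bo_pin();
	if (r) { fprintf(stderr, "(%d) pin HPD EOP bo failed\n", r); mec_fini(); return r; }
	r = bo_kmap();
	if (r) { fprintf(stderr, "(%d) map HPD EOP bo failed\n", r); mec_fini(); return r; }

	printf("HPD EOP bo ready\n");
	return 0;
}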
4920 * @rdev: radeon_device pointer
4926 static int cik_cp_compute_resume(struct radeon_device *rdev) cik_cp_compute_resume() argument
4938 r = cik_cp_compute_start(rdev); cik_cp_compute_resume()
4948 mutex_lock(&rdev->srbm_mutex); cik_cp_compute_resume()
4950 eop_gpu_addr = rdev->mec.hpd_eop_gpu_addr; cik_cp_compute_resume()
4952 cik_srbm_select(rdev, 0, 0, 0, 0); cik_cp_compute_resume()
4967 mutex_unlock(&rdev->srbm_mutex); cik_cp_compute_resume()
4976 if (rdev->ring[idx].mqd_obj == NULL) { cik_cp_compute_resume()
4977 r = radeon_bo_create(rdev, cik_cp_compute_resume()
4981 NULL, &rdev->ring[idx].mqd_obj); cik_cp_compute_resume()
4983 dev_warn(rdev->dev, "(%d) create MQD bo failed\n", r); cik_cp_compute_resume()
4988 r = radeon_bo_reserve(rdev->ring[idx].mqd_obj, false); cik_cp_compute_resume()
4990 cik_cp_compute_fini(rdev); cik_cp_compute_resume()
4993 r = radeon_bo_pin(rdev->ring[idx].mqd_obj, RADEON_GEM_DOMAIN_GTT, cik_cp_compute_resume()
4996 dev_warn(rdev->dev, "(%d) pin MQD bo failed\n", r); cik_cp_compute_resume()
4997 cik_cp_compute_fini(rdev); cik_cp_compute_resume()
5000 r = radeon_bo_kmap(rdev->ring[idx].mqd_obj, (void **)&buf); cik_cp_compute_resume()
5002 dev_warn(rdev->dev, "(%d) map MQD bo failed\n", r); cik_cp_compute_resume()
5003 cik_cp_compute_fini(rdev); cik_cp_compute_resume()
5017 mutex_lock(&rdev->srbm_mutex); cik_cp_compute_resume()
5018 cik_srbm_select(rdev, rdev->ring[idx].me, cik_cp_compute_resume()
5019 rdev->ring[idx].pipe, cik_cp_compute_resume()
5020 rdev->ring[idx].queue, 0); cik_cp_compute_resume()
5043 for (j = 0; j < rdev->usec_timeout; j++) { cik_cp_compute_resume()
5064 hqd_gpu_addr = rdev->ring[idx].gpu_addr >> 8; cik_cp_compute_resume()
5076 order_base_2(rdev->ring[idx].ring_size / 8); cik_cp_compute_resume()
5090 wb_gpu_addr = rdev->wb.gpu_addr + CIK_WB_CP1_WPTR_OFFSET; cik_cp_compute_resume()
5092 wb_gpu_addr = rdev->wb.gpu_addr + CIK_WB_CP2_WPTR_OFFSET; cik_cp_compute_resume()
5101 wb_gpu_addr = rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET; cik_cp_compute_resume()
5103 wb_gpu_addr = rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET; cik_cp_compute_resume()
5118 DOORBELL_OFFSET(rdev->ring[idx].doorbell_index); cik_cp_compute_resume()
5130 rdev->ring[idx].wptr = 0; cik_cp_compute_resume()
5131 mqd->queue_state.cp_hqd_pq_wptr = rdev->ring[idx].wptr; cik_cp_compute_resume()
5143 cik_srbm_select(rdev, 0, 0, 0, 0); cik_cp_compute_resume()
5144 mutex_unlock(&rdev->srbm_mutex); cik_cp_compute_resume()
5146 radeon_bo_kunmap(rdev->ring[idx].mqd_obj); cik_cp_compute_resume()
5147 radeon_bo_unreserve(rdev->ring[idx].mqd_obj); cik_cp_compute_resume()
5149 rdev->ring[idx].ready = true; cik_cp_compute_resume()
5150 r = radeon_ring_test(rdev, idx, &rdev->ring[idx]); cik_cp_compute_resume()
5152 rdev->ring[idx].ready = false; cik_cp_compute_resume()
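
The MQD setup hits above program queue sizes as log2 fields: order_base_2(ring_size / 8) for CP_HQD_PQ_CONTROL here, and order_base_2(ring_size / 4) for the IH ring later in this file. A standalone sketch of that encoding; the local order_base_2() mimics the kernel helper from <linux/log2.h> (round up to the next power of two, then take log2):

/* Standalone sketch of the log2 queue-size encoding. */
#include <stdio.h>

static unsigned int order_base_2(unsigned long n)
{
	unsigned int order = 0;

	while ((1ul << order) < n)      /* smallest order with 2^order >= n */
		order++;
	return order;
}

int main(void)
{
	unsigned long ring_size = 1024 * 1024;  /* a 1 MiB ring, in bytes */

	printf("CP_HQD_PQ_CONTROL queue size field = %u\n",
	       order_base_2(ring_size / 8));
	printf("IH_RB_CNTL rb_bufsz field          = %u\n",
	       order_base_2(ring_size / 4));
	return 0;
}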
5158 static void cik_cp_enable(struct radeon_device *rdev, bool enable) cik_cp_enable() argument
5160 cik_cp_gfx_enable(rdev, enable); cik_cp_enable()
5161 cik_cp_compute_enable(rdev, enable); cik_cp_enable()
5164 static int cik_cp_load_microcode(struct radeon_device *rdev) cik_cp_load_microcode() argument
5168 r = cik_cp_gfx_load_microcode(rdev); cik_cp_load_microcode()
5171 r = cik_cp_compute_load_microcode(rdev); cik_cp_load_microcode()
5178 static void cik_cp_fini(struct radeon_device *rdev) cik_cp_fini() argument
5180 cik_cp_gfx_fini(rdev); cik_cp_fini()
5181 cik_cp_compute_fini(rdev); cik_cp_fini()
5184 static int cik_cp_resume(struct radeon_device *rdev) cik_cp_resume() argument
5188 cik_enable_gui_idle_interrupt(rdev, false); cik_cp_resume()
5190 r = cik_cp_load_microcode(rdev); cik_cp_resume()
5194 r = cik_cp_gfx_resume(rdev); cik_cp_resume()
5197 r = cik_cp_compute_resume(rdev); cik_cp_resume()
5201 cik_enable_gui_idle_interrupt(rdev, true); cik_cp_resume()
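
The cik_cp_resume() hits above fix an ordering: mask the GUI-idle interrupt, load CP microcode, bring up the gfx ring before the compute rings, and only re-enable the interrupt once everything succeeded. A standalone sketch of that sequencing, all calls stubbed:

/* Standalone sketch of the CP resume ordering. */
#include <stdio.h>

static void enable_gui_idle_interrupt(int on) { printf("gui idle irq %s\n", on ? "on" : "off"); }
static int load_microcode(void)   { return 0; }
static int gfx_resume(void)       { return 0; }
static int compute_resume(void)   { return 0; }

static int cp_resume(void)
{
	int r;

	enable_gui_idle_interrupt(0);           /* keep things quiet while loading ucode */
	r = load_microcode();
	if (r)
		return r;
	r = gfx_resume();                       /* gfx ring first ... */
	if (r)
		return r;
	r = compute_resume();                   /* ... then the two compute rings */
	if (r)
		return r;
	enable_gui_idle_interrupt(1);
	return 0;
}

int main(void) { return cp_resume(); }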
5206 static void cik_print_gpu_status_regs(struct radeon_device *rdev) cik_print_gpu_status_regs() argument
5208 dev_info(rdev->dev, " GRBM_STATUS=0x%08X\n", cik_print_gpu_status_regs()
5210 dev_info(rdev->dev, " GRBM_STATUS2=0x%08X\n", cik_print_gpu_status_regs()
5212 dev_info(rdev->dev, " GRBM_STATUS_SE0=0x%08X\n", cik_print_gpu_status_regs()
5214 dev_info(rdev->dev, " GRBM_STATUS_SE1=0x%08X\n", cik_print_gpu_status_regs()
5216 dev_info(rdev->dev, " GRBM_STATUS_SE2=0x%08X\n", cik_print_gpu_status_regs()
5218 dev_info(rdev->dev, " GRBM_STATUS_SE3=0x%08X\n", cik_print_gpu_status_regs()
5220 dev_info(rdev->dev, " SRBM_STATUS=0x%08X\n", cik_print_gpu_status_regs()
5222 dev_info(rdev->dev, " SRBM_STATUS2=0x%08X\n", cik_print_gpu_status_regs()
5224 dev_info(rdev->dev, " SDMA0_STATUS_REG = 0x%08X\n", cik_print_gpu_status_regs()
5226 dev_info(rdev->dev, " SDMA1_STATUS_REG = 0x%08X\n", cik_print_gpu_status_regs()
5228 dev_info(rdev->dev, " CP_STAT = 0x%08x\n", RREG32(CP_STAT)); cik_print_gpu_status_regs()
5229 dev_info(rdev->dev, " CP_STALLED_STAT1 = 0x%08x\n", cik_print_gpu_status_regs()
5231 dev_info(rdev->dev, " CP_STALLED_STAT2 = 0x%08x\n", cik_print_gpu_status_regs()
5233 dev_info(rdev->dev, " CP_STALLED_STAT3 = 0x%08x\n", cik_print_gpu_status_regs()
5235 dev_info(rdev->dev, " CP_CPF_BUSY_STAT = 0x%08x\n", cik_print_gpu_status_regs()
5237 dev_info(rdev->dev, " CP_CPF_STALLED_STAT1 = 0x%08x\n", cik_print_gpu_status_regs()
5239 dev_info(rdev->dev, " CP_CPF_STATUS = 0x%08x\n", RREG32(CP_CPF_STATUS)); cik_print_gpu_status_regs()
5240 dev_info(rdev->dev, " CP_CPC_BUSY_STAT = 0x%08x\n", RREG32(CP_CPC_BUSY_STAT)); cik_print_gpu_status_regs()
5241 dev_info(rdev->dev, " CP_CPC_STALLED_STAT1 = 0x%08x\n", cik_print_gpu_status_regs()
5243 dev_info(rdev->dev, " CP_CPC_STATUS = 0x%08x\n", RREG32(CP_CPC_STATUS)); cik_print_gpu_status_regs()
5249 * @rdev: radeon_device pointer
5255 u32 cik_gpu_check_soft_reset(struct radeon_device *rdev) cik_gpu_check_soft_reset() argument
5315 if (evergreen_is_display_hung(rdev)) cik_gpu_check_soft_reset()
5330 * @rdev: radeon_device pointer
5335 static void cik_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask) cik_gpu_soft_reset() argument
5344 dev_info(rdev->dev, "GPU soft reset: 0x%08X\n", reset_mask); cik_gpu_soft_reset()
5346 cik_print_gpu_status_regs(rdev); cik_gpu_soft_reset()
5347 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", cik_gpu_soft_reset()
5349 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", cik_gpu_soft_reset()
5353 cik_fini_pg(rdev); cik_gpu_soft_reset()
5354 cik_fini_cg(rdev); cik_gpu_soft_reset()
5357 cik_rlc_stop(rdev); cik_gpu_soft_reset()
5378 evergreen_mc_stop(rdev, &save); cik_gpu_soft_reset()
5379 if (evergreen_mc_wait_for_idle(rdev)) { cik_gpu_soft_reset()
5380 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); cik_gpu_soft_reset()
5416 if (!(rdev->flags & RADEON_IS_IGP)) { cik_gpu_soft_reset()
5424 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp); cik_gpu_soft_reset()
5438 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); cik_gpu_soft_reset()
5452 evergreen_mc_resume(rdev, &save); cik_gpu_soft_reset()
5455 cik_print_gpu_status_regs(rdev); cik_gpu_soft_reset()
5464 static void kv_save_regs_for_reset(struct radeon_device *rdev, kv_save_regs_for_reset() argument
5476 static void kv_restore_regs_for_reset(struct radeon_device *rdev, kv_restore_regs_for_reset() argument
5549 static void cik_gpu_pci_config_reset(struct radeon_device *rdev) cik_gpu_pci_config_reset() argument
5555 dev_info(rdev->dev, "GPU pci config reset\n"); cik_gpu_pci_config_reset()
5560 cik_fini_pg(rdev); cik_gpu_pci_config_reset()
5561 cik_fini_cg(rdev); cik_gpu_pci_config_reset()
5580 cik_rlc_stop(rdev); cik_gpu_pci_config_reset()
5585 evergreen_mc_stop(rdev, &save); cik_gpu_pci_config_reset()
5586 if (evergreen_mc_wait_for_idle(rdev)) { cik_gpu_pci_config_reset()
5587 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); cik_gpu_pci_config_reset()
5590 if (rdev->flags & RADEON_IS_IGP) cik_gpu_pci_config_reset()
5591 kv_save_regs_for_reset(rdev, &kv_save); cik_gpu_pci_config_reset()
5594 pci_clear_master(rdev->pdev); cik_gpu_pci_config_reset()
5596 radeon_pci_config_reset(rdev); cik_gpu_pci_config_reset()
5601 for (i = 0; i < rdev->usec_timeout; i++) { cik_gpu_pci_config_reset()
5608 if (rdev->flags & RADEON_IS_IGP) cik_gpu_pci_config_reset()
5609 kv_restore_regs_for_reset(rdev, &kv_save); cik_gpu_pci_config_reset()
5615 * @rdev: radeon_device pointer
5621 int cik_asic_reset(struct radeon_device *rdev) cik_asic_reset() argument
5625 reset_mask = cik_gpu_check_soft_reset(rdev); cik_asic_reset()
5628 r600_set_bios_scratch_engine_hung(rdev, true); cik_asic_reset()
5631 cik_gpu_soft_reset(rdev, reset_mask); cik_asic_reset()
5633 reset_mask = cik_gpu_check_soft_reset(rdev); cik_asic_reset()
5637 cik_gpu_pci_config_reset(rdev); cik_asic_reset()
5639 reset_mask = cik_gpu_check_soft_reset(rdev); cik_asic_reset()
5642 r600_set_bios_scratch_engine_hung(rdev, false); cik_asic_reset()
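
The cik_asic_reset() hits above implement a two-stage escalation: read the hang mask, try the targeted soft reset, re-read, and only fall back to the heavyweight PCI config reset if engines are still stuck. A standalone sketch of that control flow; the three helpers are stubs modelling a soft reset that only partially clears the hang:

/* Standalone sketch of soft-reset-then-pci-config-reset escalation. */
#include <stdio.h>

static unsigned int hang_mask = 0x5;           /* pretend two blocks report hung */

static unsigned int check_soft_reset(void) { return hang_mask; }
static void soft_reset(unsigned int mask)  { (void)mask; hang_mask = 0x1; }  /* partial fix */
static void pci_config_reset(void)         { hang_mask = 0; }                /* full reset */

int main(void)
{
	unsigned int mask = check_soft_reset();

	if (!mask)
		return 0;                       /* nothing hung, nothing to do */

	soft_reset(mask);
	mask = check_soft_reset();
	if (mask) {
		pci_config_reset();             /* soft reset was not enough */
		mask = check_soft_reset();
	}
	printf("final hang mask: 0x%x\n", mask);
	return mask ? -1 : 0;
}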
5650 * @rdev: radeon_device pointer
5656 bool cik_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) cik_gfx_is_lockup() argument
5658 u32 reset_mask = cik_gpu_check_soft_reset(rdev); cik_gfx_is_lockup()
5663 radeon_ring_lockup_update(rdev, ring); cik_gfx_is_lockup()
5666 return radeon_ring_test_lockup(rdev, ring); cik_gfx_is_lockup()
5673 * @rdev: radeon_device pointer
5678 static void cik_mc_program(struct radeon_device *rdev) cik_mc_program() argument
5694 evergreen_mc_stop(rdev, &save); cik_mc_program()
5695 if (radeon_mc_wait_for_idle(rdev)) { cik_mc_program()
5696 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); cik_mc_program()
5702 rdev->mc.vram_start >> 12); cik_mc_program()
5704 rdev->mc.vram_end >> 12); cik_mc_program()
5706 rdev->vram_scratch.gpu_addr >> 12); cik_mc_program()
5707 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16; cik_mc_program()
5708 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF); cik_mc_program()
5711 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8)); cik_mc_program()
5717 if (radeon_mc_wait_for_idle(rdev)) { cik_mc_program()
5718 dev_warn(rdev->dev, "Wait for MC idle timed out!\n"); cik_mc_program()
5720 evergreen_mc_resume(rdev, &save); cik_mc_program()
5723 rv515_vga_render_disable(rdev); cik_mc_program()
5729 * @rdev: radeon_device pointer
5735 static int cik_mc_init(struct radeon_device *rdev) cik_mc_init() argument
5741 rdev->mc.vram_is_ddr = true; cik_mc_init()
5779 rdev->mc.vram_width = numchan * chansize; cik_mc_init()
5781 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0); cik_mc_init()
5782 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0); cik_mc_init()
5784 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL; cik_mc_init()
5785 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL; cik_mc_init()
5786 rdev->mc.visible_vram_size = rdev->mc.aper_size; cik_mc_init()
5787 si_vram_gtt_location(rdev, &rdev->mc); cik_mc_init()
5788 radeon_update_bandwidth_info(rdev); cik_mc_init()
5802 * @rdev: radeon_device pointer
5806 void cik_pcie_gart_tlb_flush(struct radeon_device *rdev) cik_pcie_gart_tlb_flush() argument
5815 static void cik_pcie_init_compute_vmid(struct radeon_device *rdev) cik_pcie_init_compute_vmid() argument
5824 mutex_lock(&rdev->srbm_mutex); cik_pcie_init_compute_vmid()
5826 cik_srbm_select(rdev, 0, 0, 0, i); cik_pcie_init_compute_vmid()
5833 cik_srbm_select(rdev, 0, 0, 0, 0); cik_pcie_init_compute_vmid()
5834 mutex_unlock(&rdev->srbm_mutex); cik_pcie_init_compute_vmid()
5840 * @rdev: radeon_device pointer
5848 static int cik_pcie_gart_enable(struct radeon_device *rdev) cik_pcie_gart_enable() argument
5852 if (rdev->gart.robj == NULL) { cik_pcie_gart_enable()
5853 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n"); cik_pcie_gart_enable()
5856 r = radeon_gart_table_vram_pin(rdev); cik_pcie_gart_enable()
5879 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12); cik_pcie_gart_enable()
5880 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12); cik_pcie_gart_enable()
5881 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); cik_pcie_gart_enable()
5883 (u32)(rdev->dummy_page.addr >> 12)); cik_pcie_gart_enable()
5895 WREG32(VM_CONTEXT1_PAGE_TABLE_END_ADDR, rdev->vm_manager.max_pfn - 1); cik_pcie_gart_enable()
5899 rdev->vm_manager.saved_table_addr[i]); cik_pcie_gart_enable()
5902 rdev->vm_manager.saved_table_addr[i]); cik_pcie_gart_enable()
5907 (u32)(rdev->dummy_page.addr >> 12)); cik_pcie_gart_enable()
5924 if (rdev->family == CHIP_KAVERI) { cik_pcie_gart_enable()
5932 mutex_lock(&rdev->srbm_mutex); cik_pcie_gart_enable()
5934 cik_srbm_select(rdev, 0, 0, 0, i); cik_pcie_gart_enable()
5947 cik_srbm_select(rdev, 0, 0, 0, 0); cik_pcie_gart_enable()
5948 mutex_unlock(&rdev->srbm_mutex); cik_pcie_gart_enable()
5950 cik_pcie_init_compute_vmid(rdev); cik_pcie_gart_enable()
5952 cik_pcie_gart_tlb_flush(rdev); cik_pcie_gart_enable()
5954 (unsigned)(rdev->mc.gtt_size >> 20), cik_pcie_gart_enable()
5955 (unsigned long long)rdev->gart.table_addr); cik_pcie_gart_enable()
5956 rdev->gart.ready = true; cik_pcie_gart_enable()
5963 * @rdev: radeon_device pointer
5967 static void cik_pcie_gart_disable(struct radeon_device *rdev) cik_pcie_gart_disable() argument
5977 rdev->vm_manager.saved_table_addr[i] = RREG32(reg); cik_pcie_gart_disable()
5996 radeon_gart_table_vram_unpin(rdev); cik_pcie_gart_disable()
6002 * @rdev: radeon_device pointer
6006 static void cik_pcie_gart_fini(struct radeon_device *rdev) cik_pcie_gart_fini() argument
6008 cik_pcie_gart_disable(rdev); cik_pcie_gart_fini()
6009 radeon_gart_table_vram_free(rdev); cik_pcie_gart_fini()
6010 radeon_gart_fini(rdev); cik_pcie_gart_fini()
6017 * @rdev: radeon_device pointer
6022 int cik_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib) cik_ib_parse() argument
6036 * @rdev: radeon_device pointer
6042 int cik_vm_init(struct radeon_device *rdev) cik_vm_init() argument
6050 rdev->vm_manager.nvm = RADEON_NUM_OF_VMIDS; cik_vm_init()
6052 if (rdev->flags & RADEON_IS_IGP) { cik_vm_init()
6055 rdev->vm_manager.vram_base_offset = tmp; cik_vm_init()
6057 rdev->vm_manager.vram_base_offset = 0; cik_vm_init()
6065 * @rdev: radeon_device pointer
6069 void cik_vm_fini(struct radeon_device *rdev) cik_vm_fini() argument
6076 * @rdev: radeon_device pointer
6082 static void cik_vm_decode_fault(struct radeon_device *rdev, cik_vm_decode_fault() argument
6091 if (rdev->family == CHIP_HAWAII) cik_vm_decode_fault()
6105 * @rdev: radeon_device pointer
6110 void cik_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring, cik_vm_flush() argument
6155 cik_hdp_flush_cp_ring_emit(rdev, ring->idx); cik_vm_flush()
6190 static void cik_enable_gui_idle_interrupt(struct radeon_device *rdev, cik_enable_gui_idle_interrupt() argument
6202 static void cik_enable_lbpw(struct radeon_device *rdev, bool enable) cik_enable_lbpw() argument
6214 static void cik_wait_for_rlc_serdes(struct radeon_device *rdev) cik_wait_for_rlc_serdes() argument
6219 mutex_lock(&rdev->grbm_idx_mutex); cik_wait_for_rlc_serdes()
6220 for (i = 0; i < rdev->config.cik.max_shader_engines; i++) { cik_wait_for_rlc_serdes()
6221 for (j = 0; j < rdev->config.cik.max_sh_per_se; j++) { cik_wait_for_rlc_serdes()
6222 cik_select_se_sh(rdev, i, j); cik_wait_for_rlc_serdes()
6223 for (k = 0; k < rdev->usec_timeout; k++) { cik_wait_for_rlc_serdes()
6230 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff); cik_wait_for_rlc_serdes()
6231 mutex_unlock(&rdev->grbm_idx_mutex); cik_wait_for_rlc_serdes()
6234 for (k = 0; k < rdev->usec_timeout; k++) { cik_wait_for_rlc_serdes()
6241 static void cik_update_rlc(struct radeon_device *rdev, u32 rlc) cik_update_rlc() argument
6250 static u32 cik_halt_rlc(struct radeon_device *rdev) cik_halt_rlc() argument
6262 for (i = 0; i < rdev->usec_timeout; i++) { cik_halt_rlc()
6268 cik_wait_for_rlc_serdes(rdev); cik_halt_rlc()
6274 void cik_enter_rlc_safe_mode(struct radeon_device *rdev) cik_enter_rlc_safe_mode() argument
6282 for (i = 0; i < rdev->usec_timeout; i++) { cik_enter_rlc_safe_mode()
6288 for (i = 0; i < rdev->usec_timeout; i++) { cik_enter_rlc_safe_mode()
6295 void cik_exit_rlc_safe_mode(struct radeon_device *rdev) cik_exit_rlc_safe_mode() argument
6306 * @rdev: radeon_device pointer
6310 static void cik_rlc_stop(struct radeon_device *rdev) cik_rlc_stop() argument
6314 cik_enable_gui_idle_interrupt(rdev, false); cik_rlc_stop()
6316 cik_wait_for_rlc_serdes(rdev); cik_rlc_stop()
6322 * @rdev: radeon_device pointer
6326 static void cik_rlc_start(struct radeon_device *rdev) cik_rlc_start() argument
6330 cik_enable_gui_idle_interrupt(rdev, true); cik_rlc_start()
6338 * @rdev: radeon_device pointer
6344 static int cik_rlc_resume(struct radeon_device *rdev) cik_rlc_resume() argument
6348 if (!rdev->rlc_fw) cik_rlc_resume()
6351 cik_rlc_stop(rdev); cik_rlc_resume()
6357 si_rlc_reset(rdev); cik_rlc_resume()
6359 cik_init_pg(rdev); cik_rlc_resume()
6361 cik_init_cg(rdev); cik_rlc_resume()
6366 mutex_lock(&rdev->grbm_idx_mutex); cik_rlc_resume()
6367 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff); cik_rlc_resume()
6371 mutex_unlock(&rdev->grbm_idx_mutex); cik_rlc_resume()
6376 if (rdev->new_fw) { cik_rlc_resume()
6378 (const struct rlc_firmware_header_v1_0 *)rdev->rlc_fw->data; cik_rlc_resume()
6380 (rdev->rlc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); cik_rlc_resume()
6392 switch (rdev->family) { cik_rlc_resume()
6409 fw_data = (const __be32 *)rdev->rlc_fw->data; cik_rlc_resume()
6417 cik_enable_lbpw(rdev, false); cik_rlc_resume()
6419 if (rdev->family == CHIP_BONAIRE) cik_rlc_resume()
6422 cik_rlc_start(rdev); cik_rlc_resume()
6427 static void cik_enable_cgcg(struct radeon_device *rdev, bool enable) cik_enable_cgcg() argument
6433 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGCG)) { cik_enable_cgcg()
6434 cik_enable_gui_idle_interrupt(rdev, true); cik_enable_cgcg()
6436 tmp = cik_halt_rlc(rdev); cik_enable_cgcg()
6438 mutex_lock(&rdev->grbm_idx_mutex); cik_enable_cgcg()
6439 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff); cik_enable_cgcg()
6444 mutex_unlock(&rdev->grbm_idx_mutex); cik_enable_cgcg()
6446 cik_update_rlc(rdev, tmp); cik_enable_cgcg()
6450 cik_enable_gui_idle_interrupt(rdev, false); cik_enable_cgcg()
6465 static void cik_enable_mgcg(struct radeon_device *rdev, bool enable) cik_enable_mgcg() argument
6469 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGCG)) { cik_enable_mgcg()
6470 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGLS) { cik_enable_mgcg()
6471 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CP_LS) { cik_enable_mgcg()
6485 tmp = cik_halt_rlc(rdev); cik_enable_mgcg()
6487 mutex_lock(&rdev->grbm_idx_mutex); cik_enable_mgcg()
6488 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff); cik_enable_mgcg()
6493 mutex_unlock(&rdev->grbm_idx_mutex); cik_enable_mgcg()
6495 cik_update_rlc(rdev, tmp); cik_enable_mgcg()
6497 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGTS) { cik_enable_mgcg()
6503 if ((rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGLS) && cik_enable_mgcg()
6504 (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGTS_LS)) cik_enable_mgcg()
6535 tmp = cik_halt_rlc(rdev); cik_enable_mgcg()
6537 mutex_lock(&rdev->grbm_idx_mutex); cik_enable_mgcg()
6538 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff); cik_enable_mgcg()
6543 mutex_unlock(&rdev->grbm_idx_mutex); cik_enable_mgcg()
6545 cik_update_rlc(rdev, tmp); cik_enable_mgcg()
6562 static void cik_enable_mc_ls(struct radeon_device *rdev, cik_enable_mc_ls() argument
6570 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_LS)) cik_enable_mc_ls()
6579 static void cik_enable_mc_mgcg(struct radeon_device *rdev, cik_enable_mc_mgcg() argument
6587 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_MGCG)) cik_enable_mc_mgcg()
6596 static void cik_enable_sdma_mgcg(struct radeon_device *rdev, cik_enable_sdma_mgcg() argument
6601 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_SDMA_MGCG)) { cik_enable_sdma_mgcg()
6617 static void cik_enable_sdma_mgls(struct radeon_device *rdev, cik_enable_sdma_mgls() argument
6622 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_SDMA_LS)) { cik_enable_sdma_mgls()
6645 static void cik_enable_uvd_mgcg(struct radeon_device *rdev, cik_enable_uvd_mgcg() argument
6650 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_UVD_MGCG)) { cik_enable_uvd_mgcg()
6671 static void cik_enable_bif_mgls(struct radeon_device *rdev, cik_enable_bif_mgls() argument
6678 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_BIF_LS)) cik_enable_bif_mgls()
6689 static void cik_enable_hdp_mgcg(struct radeon_device *rdev, cik_enable_hdp_mgcg() argument
6696 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_MGCG)) cik_enable_hdp_mgcg()
6705 static void cik_enable_hdp_ls(struct radeon_device *rdev, cik_enable_hdp_ls() argument
6712 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_LS)) cik_enable_hdp_ls()
6721 void cik_update_cg(struct radeon_device *rdev, cik_update_cg() argument
6726 cik_enable_gui_idle_interrupt(rdev, false); cik_update_cg()
6729 cik_enable_mgcg(rdev, true); cik_update_cg()
6730 cik_enable_cgcg(rdev, true); cik_update_cg()
6732 cik_enable_cgcg(rdev, false); cik_update_cg()
6733 cik_enable_mgcg(rdev, false); cik_update_cg()
6735 cik_enable_gui_idle_interrupt(rdev, true); cik_update_cg()
6739 if (!(rdev->flags & RADEON_IS_IGP)) { cik_update_cg()
6740 cik_enable_mc_mgcg(rdev, enable); cik_update_cg()
6741 cik_enable_mc_ls(rdev, enable); cik_update_cg()
6746 cik_enable_sdma_mgcg(rdev, enable); cik_update_cg()
6747 cik_enable_sdma_mgls(rdev, enable); cik_update_cg()
6751 cik_enable_bif_mgls(rdev, enable); cik_update_cg()
6755 if (rdev->has_uvd) cik_update_cg()
6756 cik_enable_uvd_mgcg(rdev, enable); cik_update_cg()
6760 cik_enable_hdp_mgcg(rdev, enable); cik_update_cg()
6761 cik_enable_hdp_ls(rdev, enable); cik_update_cg()
6765 vce_v2_0_enable_mgcg(rdev, enable); cik_update_cg()
6769 static void cik_init_cg(struct radeon_device *rdev) cik_init_cg() argument
6772 cik_update_cg(rdev, RADEON_CG_BLOCK_GFX, true); cik_init_cg()
6774 if (rdev->has_uvd) cik_init_cg()
6775 si_init_uvd_internal_cg(rdev); cik_init_cg()
6777 cik_update_cg(rdev, (RADEON_CG_BLOCK_MC | cik_init_cg()
6784 static void cik_fini_cg(struct radeon_device *rdev) cik_fini_cg() argument
6786 cik_update_cg(rdev, (RADEON_CG_BLOCK_MC | cik_fini_cg()
6792 cik_update_cg(rdev, RADEON_CG_BLOCK_GFX, false); cik_fini_cg()
6795 static void cik_enable_sck_slowdown_on_pu(struct radeon_device *rdev, cik_enable_sck_slowdown_on_pu() argument
6801 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_RLC_SMU_HS)) cik_enable_sck_slowdown_on_pu()
6809 static void cik_enable_sck_slowdown_on_pd(struct radeon_device *rdev, cik_enable_sck_slowdown_on_pd() argument
6815 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_RLC_SMU_HS)) cik_enable_sck_slowdown_on_pd()
6823 static void cik_enable_cp_pg(struct radeon_device *rdev, bool enable) cik_enable_cp_pg() argument
6828 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_CP)) cik_enable_cp_pg()
6836 static void cik_enable_gds_pg(struct radeon_device *rdev, bool enable) cik_enable_gds_pg() argument
6841 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GDS)) cik_enable_gds_pg()
6853 void cik_init_cp_pg_table(struct radeon_device *rdev) cik_init_cp_pg_table() argument
6860 if (rdev->family == CHIP_KAVERI) cik_init_cp_pg_table()
6863 if (rdev->rlc.cp_table_ptr == NULL) cik_init_cp_pg_table()
6867 dst_ptr = rdev->rlc.cp_table_ptr; cik_init_cp_pg_table()
6869 if (rdev->new_fw) { cik_init_cp_pg_table()
6874 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->ce_fw->data; cik_init_cp_pg_table()
6876 (rdev->ce_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); cik_init_cp_pg_table()
6880 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->pfp_fw->data; cik_init_cp_pg_table()
6882 (rdev->pfp_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); cik_init_cp_pg_table()
6886 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->me_fw->data; cik_init_cp_pg_table()
6888 (rdev->me_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); cik_init_cp_pg_table()
6892 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->mec_fw->data; cik_init_cp_pg_table()
6894 (rdev->mec_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); cik_init_cp_pg_table()
6898 hdr = (const struct gfx_firmware_header_v1_0 *)rdev->mec2_fw->data; cik_init_cp_pg_table()
6900 (rdev->mec2_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); cik_init_cp_pg_table()
6915 fw_data = (const __be32 *)rdev->ce_fw->data; cik_init_cp_pg_table()
6918 fw_data = (const __be32 *)rdev->pfp_fw->data; cik_init_cp_pg_table()
6921 fw_data = (const __be32 *)rdev->me_fw->data; cik_init_cp_pg_table()
6924 fw_data = (const __be32 *)rdev->mec_fw->data; cik_init_cp_pg_table()
6937 static void cik_enable_gfx_cgpg(struct radeon_device *rdev, cik_enable_gfx_cgpg() argument
6942 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG)) { cik_enable_gfx_cgpg()
6967 static u32 cik_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh) cik_get_cu_active_bitmap() argument
6972 mutex_lock(&rdev->grbm_idx_mutex); cik_get_cu_active_bitmap()
6973 cik_select_se_sh(rdev, se, sh); cik_get_cu_active_bitmap()
6976 cik_select_se_sh(rdev, 0xffffffff, 0xffffffff); cik_get_cu_active_bitmap()
6977 mutex_unlock(&rdev->grbm_idx_mutex); cik_get_cu_active_bitmap()
6984 for (i = 0; i < rdev->config.cik.max_cu_per_sh; i++) { cik_get_cu_active_bitmap()
6992 static void cik_init_ao_cu_mask(struct radeon_device *rdev) cik_init_ao_cu_mask() argument
6998 for (i = 0; i < rdev->config.cik.max_shader_engines; i++) { cik_init_ao_cu_mask()
6999 for (j = 0; j < rdev->config.cik.max_sh_per_se; j++) { cik_init_ao_cu_mask()
7003 for (k = 0; k < rdev->config.cik.max_cu_per_sh; k++) { cik_init_ao_cu_mask()
7004 if (cik_get_cu_active_bitmap(rdev, i, j) & mask) { cik_init_ao_cu_mask()
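
The cik_init_ao_cu_mask() hits above walk every shader engine and SH, fetch a per-SE/SH active-CU bitmap, and count set bits by sliding a one-bit mask across max_cu_per_sh positions. A simplified standalone sketch of those loops (the real code also accumulates an always-on mask per SH, which is elided here):

/* Standalone sketch of the active-CU bitmap walk. */
#include <stdio.h>
#include <stdint.h>

static uint32_t get_cu_active_bitmap(int se, int sh)
{
	(void)se; (void)sh;
	return 0x2f;                            /* pretend CUs 0-3 and 5 are active */
}

int main(void)
{
	int max_se = 1, max_sh = 1, max_cu_per_sh = 8;
	int i, j, k, counter = 0;
	uint32_t mask;

	for (i = 0; i < max_se; i++)
		for (j = 0; j < max_sh; j++) {
			mask = 1;
			for (k = 0; k < max_cu_per_sh; k++) {
				if (get_cu_active_bitmap(i, j) & mask)
					counter++;
				mask <<= 1;
			}
		}
	printf("%d active CUs\n", counter);
	return 0;
}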
7025 static void cik_enable_gfx_static_mgpg(struct radeon_device *rdev, cik_enable_gfx_static_mgpg() argument
7031 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_SMG)) cik_enable_gfx_static_mgpg()
7039 static void cik_enable_gfx_dynamic_mgpg(struct radeon_device *rdev, cik_enable_gfx_dynamic_mgpg() argument
7045 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_DMG)) cik_enable_gfx_dynamic_mgpg()
7056 static void cik_init_gfx_cgpg(struct radeon_device *rdev) cik_init_gfx_cgpg() argument
7061 if (rdev->rlc.cs_data) { cik_init_gfx_cgpg()
7063 WREG32(RLC_GPM_SCRATCH_DATA, upper_32_bits(rdev->rlc.clear_state_gpu_addr)); cik_init_gfx_cgpg()
7064 WREG32(RLC_GPM_SCRATCH_DATA, lower_32_bits(rdev->rlc.clear_state_gpu_addr)); cik_init_gfx_cgpg()
7065 WREG32(RLC_GPM_SCRATCH_DATA, rdev->rlc.clear_state_size); cik_init_gfx_cgpg()
7071 if (rdev->rlc.reg_list) { cik_init_gfx_cgpg()
7073 for (i = 0; i < rdev->rlc.reg_list_size; i++) cik_init_gfx_cgpg()
7074 WREG32(RLC_GPM_SCRATCH_DATA, rdev->rlc.reg_list[i]); cik_init_gfx_cgpg()
7082 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8); cik_init_gfx_cgpg()
7083 WREG32(RLC_CP_TABLE_RESTORE, rdev->rlc.cp_table_gpu_addr >> 8); cik_init_gfx_cgpg()
7105 static void cik_update_gfx_pg(struct radeon_device *rdev, bool enable) cik_update_gfx_pg() argument
7107 cik_enable_gfx_cgpg(rdev, enable); cik_update_gfx_pg()
7108 cik_enable_gfx_static_mgpg(rdev, enable); cik_update_gfx_pg()
7109 cik_enable_gfx_dynamic_mgpg(rdev, enable); cik_update_gfx_pg()
7112 u32 cik_get_csb_size(struct radeon_device *rdev) cik_get_csb_size() argument
7118 if (rdev->rlc.cs_data == NULL) cik_get_csb_size()
7126 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) { cik_get_csb_size()
7144 void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer) cik_get_csb_buffer() argument
7150 if (rdev->rlc.cs_data == NULL) cik_get_csb_buffer()
7162 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) { cik_get_csb_buffer()
7178 switch (rdev->family) { cik_get_csb_buffer()
7209 static void cik_init_pg(struct radeon_device *rdev) cik_init_pg() argument
7211 if (rdev->pg_flags) { cik_init_pg()
7212 cik_enable_sck_slowdown_on_pu(rdev, true); cik_init_pg()
7213 cik_enable_sck_slowdown_on_pd(rdev, true); cik_init_pg()
7214 if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG) { cik_init_pg()
7215 cik_init_gfx_cgpg(rdev); cik_init_pg()
7216 cik_enable_cp_pg(rdev, true); cik_init_pg()
7217 cik_enable_gds_pg(rdev, true); cik_init_pg()
7219 cik_init_ao_cu_mask(rdev); cik_init_pg()
7220 cik_update_gfx_pg(rdev, true); cik_init_pg()
7224 static void cik_fini_pg(struct radeon_device *rdev) cik_fini_pg() argument
7226 if (rdev->pg_flags) { cik_fini_pg()
7227 cik_update_gfx_pg(rdev, false); cik_fini_pg()
7228 if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG) { cik_fini_pg()
7229 cik_enable_cp_pg(rdev, false); cik_fini_pg()
7230 cik_enable_gds_pg(rdev, false); cik_fini_pg()
7253 * @rdev: radeon_device pointer
7257 static void cik_enable_interrupts(struct radeon_device *rdev) cik_enable_interrupts() argument
7266 rdev->ih.enabled = true; cik_enable_interrupts()
7272 * @rdev: radeon_device pointer
7276 static void cik_disable_interrupts(struct radeon_device *rdev) cik_disable_interrupts() argument
7288 rdev->ih.enabled = false; cik_disable_interrupts()
7289 rdev->ih.rptr = 0; cik_disable_interrupts()
7295 * @rdev: radeon_device pointer
7299 static void cik_disable_interrupt_state(struct radeon_device *rdev) cik_disable_interrupt_state() argument
7328 if (rdev->num_crtc >= 4) { cik_disable_interrupt_state()
7332 if (rdev->num_crtc >= 6) { cik_disable_interrupt_state()
7337 if (rdev->num_crtc >= 2) { cik_disable_interrupt_state()
7341 if (rdev->num_crtc >= 4) { cik_disable_interrupt_state()
7345 if (rdev->num_crtc >= 6) { cik_disable_interrupt_state()
7372 * @rdev: radeon_device pointer
7380 static int cik_irq_init(struct radeon_device *rdev) cik_irq_init() argument
7387 ret = r600_ih_ring_alloc(rdev); cik_irq_init()
7392 cik_disable_interrupts(rdev); cik_irq_init()
7395 ret = cik_rlc_resume(rdev); cik_irq_init()
7397 r600_ih_ring_fini(rdev); cik_irq_init()
7403 WREG32(INTERRUPT_CNTL2, rdev->ih.gpu_addr >> 8); cik_irq_init()
7413 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8); cik_irq_init()
7414 rb_bufsz = order_base_2(rdev->ih.ring_size / 4); cik_irq_init()
7420 if (rdev->wb.enabled) cik_irq_init()
7424 WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC); cik_irq_init()
7425 WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF); cik_irq_init()
7436 if (rdev->msi_enabled) cik_irq_init()
7441 cik_disable_interrupt_state(rdev); cik_irq_init()
7443 pci_set_master(rdev->pdev); cik_irq_init()
7446 cik_enable_interrupts(rdev); cik_irq_init()
7454 * @rdev: radeon_device pointer
7460 int cik_irq_set(struct radeon_device *rdev) cik_irq_set() argument
7469 if (!rdev->irq.installed) { cik_irq_set()
7474 if (!rdev->ih.enabled) { cik_irq_set()
7475 cik_disable_interrupts(rdev); cik_irq_set()
7477 cik_disable_interrupt_state(rdev); cik_irq_set()
7498 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) { cik_irq_set()
7502 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) { cik_irq_set()
7503 struct radeon_ring *ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; cik_irq_set()
7518 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) { cik_irq_set()
7519 struct radeon_ring *ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; cik_irq_set()
7535 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) { cik_irq_set()
7540 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) { cik_irq_set()
7545 if (rdev->irq.crtc_vblank_int[0] || cik_irq_set()
7546 atomic_read(&rdev->irq.pflip[0])) { cik_irq_set()
7550 if (rdev->irq.crtc_vblank_int[1] || cik_irq_set()
7551 atomic_read(&rdev->irq.pflip[1])) { cik_irq_set()
7555 if (rdev->irq.crtc_vblank_int[2] || cik_irq_set()
7556 atomic_read(&rdev->irq.pflip[2])) { cik_irq_set()
7560 if (rdev->irq.crtc_vblank_int[3] || cik_irq_set()
7561 atomic_read(&rdev->irq.pflip[3])) { cik_irq_set()
7565 if (rdev->irq.crtc_vblank_int[4] || cik_irq_set()
7566 atomic_read(&rdev->irq.pflip[4])) { cik_irq_set()
7570 if (rdev->irq.crtc_vblank_int[5] || cik_irq_set()
7571 atomic_read(&rdev->irq.pflip[5])) { cik_irq_set()
7575 if (rdev->irq.hpd[0]) { cik_irq_set()
7579 if (rdev->irq.hpd[1]) { cik_irq_set()
7583 if (rdev->irq.hpd[2]) { cik_irq_set()
7587 if (rdev->irq.hpd[3]) { cik_irq_set()
7591 if (rdev->irq.hpd[4]) { cik_irq_set()
7595 if (rdev->irq.hpd[5]) { cik_irq_set()
7611 if (rdev->num_crtc >= 4) { cik_irq_set()
7615 if (rdev->num_crtc >= 6) { cik_irq_set()
7620 if (rdev->num_crtc >= 2) { cik_irq_set()
7626 if (rdev->num_crtc >= 4) { cik_irq_set()
7632 if (rdev->num_crtc >= 6) { cik_irq_set()
7655 * @rdev: radeon_device pointer
7661 static inline void cik_irq_ack(struct radeon_device *rdev) cik_irq_ack() argument
7665 rdev->irq.stat_regs.cik.disp_int = RREG32(DISP_INTERRUPT_STATUS); cik_irq_ack()
7666 rdev->irq.stat_regs.cik.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE); cik_irq_ack()
7667 rdev->irq.stat_regs.cik.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2); cik_irq_ack()
7668 rdev->irq.stat_regs.cik.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3); cik_irq_ack()
7669 rdev->irq.stat_regs.cik.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4); cik_irq_ack()
7670 rdev->irq.stat_regs.cik.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5); cik_irq_ack()
7671 rdev->irq.stat_regs.cik.disp_int_cont6 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE6); cik_irq_ack()
7673 rdev->irq.stat_regs.cik.d1grph_int = RREG32(GRPH_INT_STATUS + cik_irq_ack()
7675 rdev->irq.stat_regs.cik.d2grph_int = RREG32(GRPH_INT_STATUS + cik_irq_ack()
7677 if (rdev->num_crtc >= 4) { cik_irq_ack()
7678 rdev->irq.stat_regs.cik.d3grph_int = RREG32(GRPH_INT_STATUS + cik_irq_ack()
7680 rdev->irq.stat_regs.cik.d4grph_int = RREG32(GRPH_INT_STATUS + cik_irq_ack()
7683 if (rdev->num_crtc >= 6) { cik_irq_ack()
7684 rdev->irq.stat_regs.cik.d5grph_int = RREG32(GRPH_INT_STATUS + cik_irq_ack()
7686 rdev->irq.stat_regs.cik.d6grph_int = RREG32(GRPH_INT_STATUS + cik_irq_ack()
7690 if (rdev->irq.stat_regs.cik.d1grph_int & GRPH_PFLIP_INT_OCCURRED) cik_irq_ack()
7693 if (rdev->irq.stat_regs.cik.d2grph_int & GRPH_PFLIP_INT_OCCURRED) cik_irq_ack()
7696 if (rdev->irq.stat_regs.cik.disp_int & LB_D1_VBLANK_INTERRUPT) cik_irq_ack()
7698 if (rdev->irq.stat_regs.cik.disp_int & LB_D1_VLINE_INTERRUPT) cik_irq_ack()
7700 if (rdev->irq.stat_regs.cik.disp_int_cont & LB_D2_VBLANK_INTERRUPT) cik_irq_ack()
7702 if (rdev->irq.stat_regs.cik.disp_int_cont & LB_D2_VLINE_INTERRUPT) cik_irq_ack()
7705 if (rdev->num_crtc >= 4) { cik_irq_ack()
7706 if (rdev->irq.stat_regs.cik.d3grph_int & GRPH_PFLIP_INT_OCCURRED) cik_irq_ack()
7709 if (rdev->irq.stat_regs.cik.d4grph_int & GRPH_PFLIP_INT_OCCURRED) cik_irq_ack()
7712 if (rdev->irq.stat_regs.cik.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) cik_irq_ack()
7714 if (rdev->irq.stat_regs.cik.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) cik_irq_ack()
7716 if (rdev->irq.stat_regs.cik.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) cik_irq_ack()
7718 if (rdev->irq.stat_regs.cik.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) cik_irq_ack()
7722 if (rdev->num_crtc >= 6) { cik_irq_ack()
7723 if (rdev->irq.stat_regs.cik.d5grph_int & GRPH_PFLIP_INT_OCCURRED) cik_irq_ack()
7726 if (rdev->irq.stat_regs.cik.d6grph_int & GRPH_PFLIP_INT_OCCURRED) cik_irq_ack()
7729 if (rdev->irq.stat_regs.cik.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) cik_irq_ack()
7731 if (rdev->irq.stat_regs.cik.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) cik_irq_ack()
7733 if (rdev->irq.stat_regs.cik.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) cik_irq_ack()
7735 if (rdev->irq.stat_regs.cik.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) cik_irq_ack()
7739 if (rdev->irq.stat_regs.cik.disp_int & DC_HPD1_INTERRUPT) { cik_irq_ack()
7744 if (rdev->irq.stat_regs.cik.disp_int_cont & DC_HPD2_INTERRUPT) { cik_irq_ack()
7749 if (rdev->irq.stat_regs.cik.disp_int_cont2 & DC_HPD3_INTERRUPT) { cik_irq_ack()
7754 if (rdev->irq.stat_regs.cik.disp_int_cont3 & DC_HPD4_INTERRUPT) { cik_irq_ack()
7759 if (rdev->irq.stat_regs.cik.disp_int_cont4 & DC_HPD5_INTERRUPT) { cik_irq_ack()
7764 if (rdev->irq.stat_regs.cik.disp_int_cont5 & DC_HPD6_INTERRUPT) { cik_irq_ack()
7769 if (rdev->irq.stat_regs.cik.disp_int & DC_HPD1_RX_INTERRUPT) { cik_irq_ack()
7774 if (rdev->irq.stat_regs.cik.disp_int_cont & DC_HPD2_RX_INTERRUPT) { cik_irq_ack()
7779 if (rdev->irq.stat_regs.cik.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) { cik_irq_ack()
7784 if (rdev->irq.stat_regs.cik.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) { cik_irq_ack()
7789 if (rdev->irq.stat_regs.cik.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) { cik_irq_ack()
7794 if (rdev->irq.stat_regs.cik.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) { cik_irq_ack()
7804 * @rdev: radeon_device pointer
7808 static void cik_irq_disable(struct radeon_device *rdev) cik_irq_disable() argument
7810 cik_disable_interrupts(rdev); cik_irq_disable()
7813 cik_irq_ack(rdev); cik_irq_disable()
7814 cik_disable_interrupt_state(rdev); cik_irq_disable()
7820 * @rdev: radeon_device pointer
7825 static void cik_irq_suspend(struct radeon_device *rdev) cik_irq_suspend() argument
7827 cik_irq_disable(rdev); cik_irq_suspend()
7828 cik_rlc_stop(rdev); cik_irq_suspend()
7834 * @rdev: radeon_device pointer
7840 static void cik_irq_fini(struct radeon_device *rdev) cik_irq_fini() argument
7842 cik_irq_suspend(rdev); cik_irq_fini()
7843 r600_ih_ring_fini(rdev); cik_irq_fini()
7849 * @rdev: radeon_device pointer
7857 static inline u32 cik_get_ih_wptr(struct radeon_device *rdev) cik_get_ih_wptr() argument
7861 if (rdev->wb.enabled) cik_get_ih_wptr()
7862 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]); cik_get_ih_wptr()
7872 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n", cik_get_ih_wptr()
7873 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask); cik_get_ih_wptr()
7874 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask; cik_get_ih_wptr()
7879 return (wptr & rdev->ih.ptr_mask); cik_get_ih_wptr()
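
The cik_get_ih_wptr() hits above show the overflow recovery: when the hardware flags an overrun, the read pointer jumps to 16 bytes behind the write pointer (one IH vector) and everything older is dropped; both pointers always wrap through ptr_mask since the ring size is a power of two. A standalone sketch of that arithmetic:

/* Standalone sketch of IH write-pointer overflow handling. */
#include <stdio.h>
#include <stdint.h>

int main(void)
{
	uint32_t ring_size = 4096;              /* bytes, power of two */
	uint32_t ptr_mask  = ring_size - 1;     /* rdev->ih.ptr_mask */
	uint32_t rptr      = 0x100;
	uint32_t wptr      = 0x1020;            /* pretend HW reported overflow here */
	int overflow       = 1;                 /* RB_OVERFLOW bit, already acked */

	if (overflow) {
		printf("IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
		       wptr, rptr, (wptr + 16) & ptr_mask);
		rptr = (wptr + 16) & ptr_mask;  /* resume just behind the writer */
	}
	printf("rptr=0x%x wptr=0x%x\n", rptr, wptr & ptr_mask);
	return 0;
}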
7907 * @rdev: radeon_device pointer
7914 int cik_irq_process(struct radeon_device *rdev) cik_irq_process() argument
7916 struct radeon_ring *cp1_ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; cik_irq_process()
7917 struct radeon_ring *cp2_ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; cik_irq_process()
7929 if (!rdev->ih.enabled || rdev->shutdown) cik_irq_process()
7932 wptr = cik_get_ih_wptr(rdev); cik_irq_process()
7936 if (atomic_xchg(&rdev->ih.lock, 1)) cik_irq_process()
7939 rptr = rdev->ih.rptr; cik_irq_process()
7946 cik_irq_ack(rdev); cik_irq_process()
7952 radeon_kfd_interrupt(rdev, cik_irq_process()
7953 (const void *) &rdev->ih.ring[ring_index]); cik_irq_process()
7955 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff; cik_irq_process()
7956 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff; cik_irq_process()
7957 ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff; cik_irq_process()
7963 if (!(rdev->irq.stat_regs.cik.disp_int & LB_D1_VBLANK_INTERRUPT)) cik_irq_process()
7966 if (rdev->irq.crtc_vblank_int[0]) { cik_irq_process()
7967 drm_handle_vblank(rdev->ddev, 0); cik_irq_process()
7968 rdev->pm.vblank_sync = true; cik_irq_process()
7969 wake_up(&rdev->irq.vblank_queue); cik_irq_process()
7971 if (atomic_read(&rdev->irq.pflip[0])) cik_irq_process()
7972 radeon_crtc_handle_vblank(rdev, 0); cik_irq_process()
7973 rdev->irq.stat_regs.cik.disp_int &= ~LB_D1_VBLANK_INTERRUPT; cik_irq_process()
7978 if (!(rdev->irq.stat_regs.cik.disp_int & LB_D1_VLINE_INTERRUPT)) cik_irq_process()
7981 rdev->irq.stat_regs.cik.disp_int &= ~LB_D1_VLINE_INTERRUPT; cik_irq_process()
7993 if (!(rdev->irq.stat_regs.cik.disp_int_cont & LB_D2_VBLANK_INTERRUPT)) cik_irq_process()
7996 if (rdev->irq.crtc_vblank_int[1]) { cik_irq_process()
7997 drm_handle_vblank(rdev->ddev, 1); cik_irq_process()
7998 rdev->pm.vblank_sync = true; cik_irq_process()
7999 wake_up(&rdev->irq.vblank_queue); cik_irq_process()
8001 if (atomic_read(&rdev->irq.pflip[1])) cik_irq_process()
8002 radeon_crtc_handle_vblank(rdev, 1); cik_irq_process()
8003 rdev->irq.stat_regs.cik.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT; cik_irq_process()
8008 if (!(rdev->irq.stat_regs.cik.disp_int_cont & LB_D2_VLINE_INTERRUPT)) cik_irq_process()
8011 rdev->irq.stat_regs.cik.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT; cik_irq_process()
8023 if (!(rdev->irq.stat_regs.cik.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)) cik_irq_process()
8026 if (rdev->irq.crtc_vblank_int[2]) { cik_irq_process()
8027 drm_handle_vblank(rdev->ddev, 2); cik_irq_process()
8028 rdev->pm.vblank_sync = true; cik_irq_process()
8029 wake_up(&rdev->irq.vblank_queue); cik_irq_process()
8031 if (atomic_read(&rdev->irq.pflip[2])) cik_irq_process()
8032 radeon_crtc_handle_vblank(rdev, 2); cik_irq_process()
8033 rdev->irq.stat_regs.cik.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT; cik_irq_process()
8038 if (!(rdev->irq.stat_regs.cik.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)) cik_irq_process()
8041 rdev->irq.stat_regs.cik.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT; cik_irq_process()
8053 if (!(rdev->irq.stat_regs.cik.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)) cik_irq_process()
8056 if (rdev->irq.crtc_vblank_int[3]) { cik_irq_process()
8057 drm_handle_vblank(rdev->ddev, 3); cik_irq_process()
8058 rdev->pm.vblank_sync = true; cik_irq_process()
8059 wake_up(&rdev->irq.vblank_queue); cik_irq_process()
8061 if (atomic_read(&rdev->irq.pflip[3])) cik_irq_process()
8062 radeon_crtc_handle_vblank(rdev, 3); cik_irq_process()
8063 rdev->irq.stat_regs.cik.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT; cik_irq_process()
8068 if (!(rdev->irq.stat_regs.cik.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)) cik_irq_process()
8071 rdev->irq.stat_regs.cik.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT; cik_irq_process()
8083 if (!(rdev->irq.stat_regs.cik.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)) cik_irq_process()
8086 if (rdev->irq.crtc_vblank_int[4]) { cik_irq_process()
8087 drm_handle_vblank(rdev->ddev, 4); cik_irq_process()
8088 rdev->pm.vblank_sync = true; cik_irq_process()
8089 wake_up(&rdev->irq.vblank_queue); cik_irq_process()
8091 if (atomic_read(&rdev->irq.pflip[4])) cik_irq_process()
8092 radeon_crtc_handle_vblank(rdev, 4); cik_irq_process()
8093 rdev->irq.stat_regs.cik.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT; cik_irq_process()
8098 if (!(rdev->irq.stat_regs.cik.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)) cik_irq_process()
8101 rdev->irq.stat_regs.cik.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT; cik_irq_process()
8113 if (!(rdev->irq.stat_regs.cik.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)) cik_irq_process()
8116 if (rdev->irq.crtc_vblank_int[5]) { cik_irq_process()
8117 drm_handle_vblank(rdev->ddev, 5); cik_irq_process()
8118 rdev->pm.vblank_sync = true; cik_irq_process()
8119 wake_up(&rdev->irq.vblank_queue); cik_irq_process()
8121 if (atomic_read(&rdev->irq.pflip[5])) cik_irq_process()
8122 radeon_crtc_handle_vblank(rdev, 5); cik_irq_process()
8123 rdev->irq.stat_regs.cik.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT; cik_irq_process()
8128 if (!(rdev->irq.stat_regs.cik.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)) cik_irq_process()
8131 rdev->irq.stat_regs.cik.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT; cik_irq_process()
8148 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1); cik_irq_process()
8153 if (!(rdev->irq.stat_regs.cik.disp_int & DC_HPD1_INTERRUPT)) cik_irq_process()
8156 rdev->irq.stat_regs.cik.disp_int &= ~DC_HPD1_INTERRUPT; cik_irq_process()
8162 if (!(rdev->irq.stat_regs.cik.disp_int_cont & DC_HPD2_INTERRUPT)) cik_irq_process()
8165 rdev->irq.stat_regs.cik.disp_int_cont &= ~DC_HPD2_INTERRUPT; cik_irq_process()
8171 if (!(rdev->irq.stat_regs.cik.disp_int_cont2 & DC_HPD3_INTERRUPT)) cik_irq_process()
8174 rdev->irq.stat_regs.cik.disp_int_cont2 &= ~DC_HPD3_INTERRUPT; cik_irq_process()
8180 if (!(rdev->irq.stat_regs.cik.disp_int_cont3 & DC_HPD4_INTERRUPT)) cik_irq_process()
8183 rdev->irq.stat_regs.cik.disp_int_cont3 &= ~DC_HPD4_INTERRUPT; cik_irq_process()
8189 if (!(rdev->irq.stat_regs.cik.disp_int_cont4 & DC_HPD5_INTERRUPT)) cik_irq_process()
8192 rdev->irq.stat_regs.cik.disp_int_cont4 &= ~DC_HPD5_INTERRUPT; cik_irq_process()
8198 if (!(rdev->irq.stat_regs.cik.disp_int_cont5 & DC_HPD6_INTERRUPT)) cik_irq_process()
8201 rdev->irq.stat_regs.cik.disp_int_cont5 &= ~DC_HPD6_INTERRUPT; cik_irq_process()
8207 if (!(rdev->irq.stat_regs.cik.disp_int & DC_HPD1_RX_INTERRUPT)) cik_irq_process()
8210 rdev->irq.stat_regs.cik.disp_int &= ~DC_HPD1_RX_INTERRUPT; cik_irq_process()
8216 if (!(rdev->irq.stat_regs.cik.disp_int_cont & DC_HPD2_RX_INTERRUPT)) cik_irq_process()
8219 rdev->irq.stat_regs.cik.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT; cik_irq_process()
8225 if (!(rdev->irq.stat_regs.cik.disp_int_cont2 & DC_HPD3_RX_INTERRUPT)) cik_irq_process()
8228 rdev->irq.stat_regs.cik.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT; cik_irq_process()
8234 if (!(rdev->irq.stat_regs.cik.disp_int_cont3 & DC_HPD4_RX_INTERRUPT)) cik_irq_process()
8237 rdev->irq.stat_regs.cik.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT; cik_irq_process()
8243 if (!(rdev->irq.stat_regs.cik.disp_int_cont4 & DC_HPD5_RX_INTERRUPT)) cik_irq_process()
8246 rdev->irq.stat_regs.cik.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT; cik_irq_process()
8252 if (!(rdev->irq.stat_regs.cik.disp_int_cont5 & DC_HPD6_RX_INTERRUPT)) cik_irq_process()
8255 rdev->irq.stat_regs.cik.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT; cik_irq_process()
8271 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX); cik_irq_process()
8282 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data); cik_irq_process()
8283 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", cik_irq_process()
8285 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", cik_irq_process()
8287 cik_vm_decode_fault(rdev, status, addr, mc_client); cik_irq_process()
8293 radeon_fence_process(rdev, TN_RING_TYPE_VCE1_INDEX); cik_irq_process()
8296 radeon_fence_process(rdev, TN_RING_TYPE_VCE2_INDEX); cik_irq_process()
8305 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); cik_irq_process()
8315 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); cik_irq_process()
8320 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX); cik_irq_process()
8322 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX); cik_irq_process()
8381 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX); cik_irq_process()
8394 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX); cik_irq_process()
8408 rdev->pm.dpm.thermal.high_to_low = false; cik_irq_process()
8413 rdev->pm.dpm.thermal.high_to_low = true; cik_irq_process()
8465 rptr &= rdev->ih.ptr_mask; cik_irq_process()
8469 schedule_work(&rdev->dp_work); cik_irq_process()
8471 schedule_delayed_work(&rdev->hotplug_work, 0); cik_irq_process()
8473 rdev->needs_reset = true; cik_irq_process()
8474 wake_up_all(&rdev->fence_queue); cik_irq_process()
8477 schedule_work(&rdev->pm.dpm.thermal.work); cik_irq_process()
8478 rdev->ih.rptr = rptr; cik_irq_process()
8479 atomic_set(&rdev->ih.lock, 0); cik_irq_process()
8482 wptr = cik_get_ih_wptr(rdev); cik_irq_process()
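
The cik_irq_process() hits above are one big drain loop: consume 16-byte IH vectors between rptr and wptr, decode src_id/src_data/ring_id from the first three dwords of each, publish the new rptr, then re-read wptr to catch vectors that raced in. A standalone sketch of the decode-and-advance core, with the ring modelled as a local array of canned entries:

/* Standalone sketch of the IH ring drain loop. */
#include <stdio.h>
#include <stdint.h>

int main(void)
{
	uint32_t ring[32] = {
		181, 0, 0, 0,                   /* one fake vector (src_id 181) */
		42,  7, 1, 0,                   /* and a second pending entry   */
	};
	uint32_t ptr_mask = sizeof(ring) - 1;   /* ring size in bytes, minus one */
	uint32_t rptr = 0, wptr = 32;           /* two 16-byte vectors pending */

	while (rptr != wptr) {
		uint32_t ring_index = rptr / 4;
		uint32_t src_id   = ring[ring_index]     & 0xff;
		uint32_t src_data = ring[ring_index + 1] & 0xfffffff;
		uint32_t ring_id  = ring[ring_index + 2] & 0xff;

		printf("src_id=%u src_data=%u ring_id=%u\n",
		       src_id, src_data, ring_id);

		rptr += 16;                     /* one vector is four dwords */
		rptr &= ptr_mask;
	}
	return 0;
}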
8495 * @rdev: radeon_device pointer
8501 static int cik_startup(struct radeon_device *rdev) cik_startup() argument
8508 cik_pcie_gen3_enable(rdev); cik_startup()
8510 cik_program_aspm(rdev); cik_startup()
8513 r = r600_vram_scratch_init(rdev); cik_startup()
8517 cik_mc_program(rdev); cik_startup()
8519 if (!(rdev->flags & RADEON_IS_IGP) && !rdev->pm.dpm_enabled) { cik_startup()
8520 r = ci_mc_load_microcode(rdev); cik_startup()
8527 r = cik_pcie_gart_enable(rdev); cik_startup()
8530 cik_gpu_init(rdev); cik_startup()
8533 if (rdev->flags & RADEON_IS_IGP) { cik_startup()
8534 if (rdev->family == CHIP_KAVERI) { cik_startup()
8535 rdev->rlc.reg_list = spectre_rlc_save_restore_register_list; cik_startup()
8536 rdev->rlc.reg_list_size = cik_startup()
8539 rdev->rlc.reg_list = kalindi_rlc_save_restore_register_list; cik_startup()
8540 rdev->rlc.reg_list_size = cik_startup()
8544 rdev->rlc.cs_data = ci_cs_data; cik_startup()
8545 rdev->rlc.cp_table_size = CP_ME_TABLE_SIZE * 5 * 4; cik_startup()
8546 r = sumo_rlc_init(rdev); cik_startup()
8553 r = radeon_wb_init(rdev); cik_startup()
8558 r = cik_mec_init(rdev); cik_startup()
8564 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); cik_startup()
8566 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); cik_startup()
8570 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX); cik_startup()
8572 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); cik_startup()
8576 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX); cik_startup()
8578 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); cik_startup()
8582 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX); cik_startup()
8584 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); cik_startup()
8588 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_DMA1_INDEX); cik_startup()
8590 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); cik_startup()
8594 r = radeon_uvd_resume(rdev); cik_startup()
8596 r = uvd_v4_2_resume(rdev); cik_startup()
8598 r = radeon_fence_driver_start_ring(rdev, cik_startup()
8601 dev_err(rdev->dev, "UVD fences init error (%d).\n", r); cik_startup()
8605 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; cik_startup()
8607 r = radeon_vce_resume(rdev); cik_startup()
8609 r = vce_v2_0_resume(rdev); cik_startup()
8611 r = radeon_fence_driver_start_ring(rdev, cik_startup()
8614 r = radeon_fence_driver_start_ring(rdev, cik_startup()
8618 dev_err(rdev->dev, "VCE init error (%d).\n", r); cik_startup()
8619 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0; cik_startup()
8620 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0; cik_startup()
8624 if (!rdev->irq.installed) { cik_startup()
8625 r = radeon_irq_kms_init(rdev); cik_startup()
8630 r = cik_irq_init(rdev); cik_startup()
8633 radeon_irq_kms_fini(rdev); cik_startup()
8636 cik_irq_set(rdev); cik_startup()
8638 if (rdev->family == CHIP_HAWAII) { cik_startup()
8639 if (rdev->new_fw) cik_startup()
8647 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; cik_startup()
8648 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, cik_startup()
8655 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; cik_startup()
8656 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP1_RPTR_OFFSET, cik_startup()
8666 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; cik_startup()
8667 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP2_RPTR_OFFSET, cik_startup()
8677 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; cik_startup()
8678 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET, cik_startup()
8683 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; cik_startup()
8684 r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET, cik_startup()
8689 r = cik_cp_resume(rdev); cik_startup()
8693 r = cik_sdma_resume(rdev); cik_startup()
8697 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; cik_startup()
8699 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, cik_startup()
8702 r = uvd_v1_0_init(rdev); cik_startup()
8709 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX]; cik_startup()
8711 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, cik_startup()
8714 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX]; cik_startup()
8716 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, cik_startup()
8720 r = vce_v1_0_init(rdev); cik_startup()
8724 r = radeon_ib_pool_init(rdev); cik_startup()
8726 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); cik_startup()
8730 r = radeon_vm_manager_init(rdev); cik_startup()
8732 dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r); cik_startup()
8736 r = radeon_audio_init(rdev); cik_startup()
8740 r = radeon_kfd_resume(rdev); cik_startup()
8750 * @rdev: radeon_device pointer
8756 int cik_resume(struct radeon_device *rdev) cik_resume() argument
8761 atom_asic_init(rdev->mode_info.atom_context); cik_resume()
8764 cik_init_golden_registers(rdev); cik_resume()
8766 if (rdev->pm.pm_method == PM_METHOD_DPM) cik_resume()
8767 radeon_pm_resume(rdev); cik_resume()
8769 rdev->accel_working = true; cik_resume()
8770 r = cik_startup(rdev); cik_resume()
8773 rdev->accel_working = false; cik_resume()
8784 * @rdev: radeon_device pointer
8790 int cik_suspend(struct radeon_device *rdev) cik_suspend() argument
8792 radeon_kfd_suspend(rdev); cik_suspend()
8793 radeon_pm_suspend(rdev); cik_suspend()
8794 radeon_audio_fini(rdev); cik_suspend()
8795 radeon_vm_manager_fini(rdev); cik_suspend()
8796 cik_cp_enable(rdev, false); cik_suspend()
8797 cik_sdma_enable(rdev, false); cik_suspend()
8798 uvd_v1_0_fini(rdev); cik_suspend()
8799 radeon_uvd_suspend(rdev); cik_suspend()
8800 radeon_vce_suspend(rdev); cik_suspend()
8801 cik_fini_pg(rdev); cik_suspend()
8802 cik_fini_cg(rdev); cik_suspend()
8803 cik_irq_suspend(rdev); cik_suspend()
8804 radeon_wb_disable(rdev); cik_suspend()
8805 cik_pcie_gart_disable(rdev); cik_suspend()
8818 * @rdev: radeon_device pointer
8825 int cik_init(struct radeon_device *rdev) cik_init() argument
8831 if (!radeon_get_bios(rdev)) { cik_init()
8832 if (ASIC_IS_AVIVO(rdev)) cik_init()
8836 if (!rdev->is_atom_bios) { cik_init()
8837 dev_err(rdev->dev, "Expecting atombios for cayman GPU\n"); cik_init()
8840 r = radeon_atombios_init(rdev); cik_init()
8845 if (!radeon_card_posted(rdev)) { cik_init()
8846 if (!rdev->bios) { cik_init()
8847 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n"); cik_init()
8851 atom_asic_init(rdev->mode_info.atom_context); cik_init()
8854 cik_init_golden_registers(rdev); cik_init()
8856 cik_scratch_init(rdev); cik_init()
8858 radeon_surface_init(rdev); cik_init()
8860 radeon_get_clock_info(rdev->ddev); cik_init()
8863 r = radeon_fence_driver_init(rdev); cik_init()
8868 r = cik_mc_init(rdev); cik_init()
8872 r = radeon_bo_init(rdev); cik_init()
8876 if (rdev->flags & RADEON_IS_IGP) { cik_init()
8877 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw || cik_init()
8878 !rdev->mec_fw || !rdev->sdma_fw || !rdev->rlc_fw) { cik_init()
8879 r = cik_init_microcode(rdev); cik_init()
8886 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw || cik_init()
8887 !rdev->mec_fw || !rdev->sdma_fw || !rdev->rlc_fw || cik_init()
8888 !rdev->mc_fw) { cik_init()
8889 r = cik_init_microcode(rdev); cik_init()
8898 radeon_pm_init(rdev); cik_init()
8900 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; cik_init()
8902 r600_ring_init(rdev, ring, 1024 * 1024); cik_init()
8904 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; cik_init()
8906 r600_ring_init(rdev, ring, 1024 * 1024); cik_init()
8907 r = radeon_doorbell_get(rdev, &ring->doorbell_index); cik_init()
8911 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; cik_init()
8913 r600_ring_init(rdev, ring, 1024 * 1024); cik_init()
8914 r = radeon_doorbell_get(rdev, &ring->doorbell_index); cik_init()
8918 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; cik_init()
8920 r600_ring_init(rdev, ring, 256 * 1024); cik_init()
8922 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; cik_init()
8924 r600_ring_init(rdev, ring, 256 * 1024); cik_init()
8926 r = radeon_uvd_init(rdev); cik_init()
8928 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; cik_init()
8930 r600_ring_init(rdev, ring, 4096); cik_init()
8933 r = radeon_vce_init(rdev); cik_init()
8935 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX]; cik_init()
8937 r600_ring_init(rdev, ring, 4096); cik_init()
8939 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX]; cik_init()
8941 r600_ring_init(rdev, ring, 4096); cik_init()
8944 rdev->ih.ring_obj = NULL; cik_init()
8945 r600_ih_ring_init(rdev, 64 * 1024); cik_init()
8947 r = r600_pcie_gart_init(rdev); cik_init()
8951 rdev->accel_working = true; cik_init()
8952 r = cik_startup(rdev); cik_init()
8954 dev_err(rdev->dev, "disabling GPU acceleration\n"); cik_init()
8955 cik_cp_fini(rdev); cik_init()
8956 cik_sdma_fini(rdev); cik_init()
8957 cik_irq_fini(rdev); cik_init()
8958 sumo_rlc_fini(rdev); cik_init()
8959 cik_mec_fini(rdev); cik_init()
8960 radeon_wb_fini(rdev); cik_init()
8961 radeon_ib_pool_fini(rdev); cik_init()
8962 radeon_vm_manager_fini(rdev); cik_init()
8963 radeon_irq_kms_fini(rdev); cik_init()
8964 cik_pcie_gart_fini(rdev); cik_init()
8965 rdev->accel_working = false; cik_init()
8972 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) { cik_init()
8983 * @rdev: radeon_device pointer
8989 void cik_fini(struct radeon_device *rdev) cik_fini() argument
8991 radeon_pm_fini(rdev); cik_fini()
8992 cik_cp_fini(rdev); cik_fini()
8993 cik_sdma_fini(rdev); cik_fini()
8994 cik_fini_pg(rdev); cik_fini()
8995 cik_fini_cg(rdev); cik_fini()
8996 cik_irq_fini(rdev); cik_fini()
8997 sumo_rlc_fini(rdev); cik_fini()
8998 cik_mec_fini(rdev); cik_fini()
8999 radeon_wb_fini(rdev); cik_fini()
9000 radeon_vm_manager_fini(rdev); cik_fini()
9001 radeon_ib_pool_fini(rdev); cik_fini()
9002 radeon_irq_kms_fini(rdev); cik_fini()
9003 uvd_v1_0_fini(rdev); cik_fini()
9004 radeon_uvd_fini(rdev); cik_fini()
9005 radeon_vce_fini(rdev); cik_fini()
9006 cik_pcie_gart_fini(rdev); cik_fini()
9007 r600_vram_scratch_fini(rdev); cik_fini()
9008 radeon_gem_fini(rdev); cik_fini()
9009 radeon_fence_driver_fini(rdev); cik_fini()
9010 radeon_bo_fini(rdev); cik_fini()
9011 radeon_atombios_fini(rdev); cik_fini()
9012 kfree(rdev->bios); cik_fini()
9013 rdev->bios = NULL; cik_fini()
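
A note on the cik_init()/cik_fini() hits above: the init error path releases resources in the reverse order they were brought up (cik_cp_fini, cik_sdma_fini, cik_irq_fini, ...), and cik_fini() replays the same teardown unconditionally. Below is a minimal, self-contained sketch of that goto-unwind idiom; the init_a/init_b/init_c stages are hypothetical stand-ins for the real subsystems, not driver code.

#include <stdio.h>

/* Hypothetical stage initializers; each returns 0 on success. */
static int init_a(void) { puts("a up");   return 0; }
static int init_b(void) { puts("b up");   return 0; }
static int init_c(void) { puts("c fail"); return -1; }

static void fini_a(void) { puts("a down"); }
static void fini_b(void) { puts("b down"); }

/*
 * Bring up stages in order; on failure, unwind only what succeeded,
 * in reverse order. This is the same shape as the cik_init() error
 * path that calls cik_cp_fini(), cik_sdma_fini(), etc. above.
 */
static int device_init(void)
{
        int r;

        r = init_a();
        if (r)
                return r;
        r = init_b();
        if (r)
                goto err_a;
        r = init_c();
        if (r)
                goto err_b;
        return 0;

err_b:
        fini_b();
err_a:
        fini_a();
        return r;
}

int main(void)
{
        return device_init() ? 1 : 0;
}
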
9019 struct radeon_device *rdev = dev->dev_private; dce8_program_fmt() local
9084 * @rdev: radeon_device pointer
9093 static u32 dce8_line_buffer_adjust(struct radeon_device *rdev, dce8_line_buffer_adjust() argument
9116 buffer_alloc = (rdev->flags & RADEON_IS_IGP) ? 2 : 4; dce8_line_buffer_adjust()
9120 buffer_alloc = (rdev->flags & RADEON_IS_IGP) ? 2 : 4; dce8_line_buffer_adjust()
9132 for (i = 0; i < rdev->usec_timeout; i++) { dce8_line_buffer_adjust()
9158 * @rdev: radeon_device pointer
9164 static u32 cik_get_number_of_dram_channels(struct radeon_device *rdev) cik_get_number_of_dram_channels() argument
9527 * @rdev: radeon_device pointer
9535 static void dce8_program_watermarks(struct radeon_device *rdev, dce8_program_watermarks() argument
9551 if ((rdev->pm.pm_method == PM_METHOD_DPM) && dce8_program_watermarks()
9552 rdev->pm.dpm_enabled) { dce8_program_watermarks()
9554 radeon_dpm_get_mclk(rdev, false) * 10; dce8_program_watermarks()
9556 radeon_dpm_get_sclk(rdev, false) * 10; dce8_program_watermarks()
9558 wm_high.yclk = rdev->pm.current_mclk * 10; dce8_program_watermarks()
9559 wm_high.sclk = rdev->pm.current_sclk * 10; dce8_program_watermarks()
9575 wm_high.dram_channels = cik_get_number_of_dram_channels(rdev); dce8_program_watermarks()
9586 (rdev->disp_priority == 2)) { dce8_program_watermarks()
9591 if ((rdev->pm.pm_method == PM_METHOD_DPM) && dce8_program_watermarks()
9592 rdev->pm.dpm_enabled) { dce8_program_watermarks()
9594 radeon_dpm_get_mclk(rdev, true) * 10; dce8_program_watermarks()
9596 radeon_dpm_get_sclk(rdev, true) * 10; dce8_program_watermarks()
9598 wm_low.yclk = rdev->pm.current_mclk * 10; dce8_program_watermarks()
9599 wm_low.sclk = rdev->pm.current_sclk * 10; dce8_program_watermarks()
9615 wm_low.dram_channels = cik_get_number_of_dram_channels(rdev); dce8_program_watermarks()
9626 (rdev->disp_priority == 2)) { dce8_program_watermarks()
9663 * @rdev: radeon_device pointer
9668 void dce8_bandwidth_update(struct radeon_device *rdev) dce8_bandwidth_update() argument
9674 if (!rdev->mode_info.mode_config_initialized) dce8_bandwidth_update()
9677 radeon_update_display_priority(rdev); dce8_bandwidth_update()
9679 for (i = 0; i < rdev->num_crtc; i++) { dce8_bandwidth_update()
9680 if (rdev->mode_info.crtcs[i]->base.enabled) dce8_bandwidth_update()
9683 for (i = 0; i < rdev->num_crtc; i++) { dce8_bandwidth_update()
9684 mode = &rdev->mode_info.crtcs[i]->base.mode; dce8_bandwidth_update()
9685 lb_size = dce8_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode); dce8_bandwidth_update()
9686 dce8_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads); dce8_bandwidth_update()
9693 * @rdev: radeon_device pointer
9698 uint64_t cik_get_gpu_clock_counter(struct radeon_device *rdev) cik_get_gpu_clock_counter() argument
9702 mutex_lock(&rdev->gpu_clock_mutex); cik_get_gpu_clock_counter()
9706 mutex_unlock(&rdev->gpu_clock_mutex); cik_get_gpu_clock_counter()
9710 static int cik_set_uvd_clock(struct radeon_device *rdev, u32 clock, cik_set_uvd_clock() argument
9717 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_GPUCLK_INPUT_FLAG_DEFAULT_GPUCLK, cik_set_uvd_clock()
9738 int cik_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk) cik_set_uvd_clocks() argument
9742 r = cik_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS); cik_set_uvd_clocks()
9746 r = cik_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS); cik_set_uvd_clocks()
9750 int cik_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk) cik_set_vce_clocks() argument
9756 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_GPUCLK_INPUT_FLAG_DEFAULT_GPUCLK, cik_set_vce_clocks()
9785 static void cik_pcie_gen3_enable(struct radeon_device *rdev) cik_pcie_gen3_enable() argument
9787 struct pci_dev *root = rdev->pdev->bus->self; cik_pcie_gen3_enable()
9793 if (pci_is_root_bus(rdev->pdev->bus)) cik_pcie_gen3_enable()
9799 if (rdev->flags & RADEON_IS_IGP) cik_pcie_gen3_enable()
9802 if (!(rdev->flags & RADEON_IS_PCIE)) cik_pcie_gen3_enable()
9805 ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask); cik_pcie_gen3_enable()
9833 gpu_pos = pci_pcie_cap(rdev->pdev); cik_pcie_gen3_enable()
9845 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg); cik_pcie_gen3_enable()
9851 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16); cik_pcie_gen3_enable()
9869 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_DEVSTA, &tmp16); cik_pcie_gen3_enable()
9874 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg); cik_pcie_gen3_enable()
9877 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &gpu_cfg2); cik_pcie_gen3_enable()
9895 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &tmp16); cik_pcie_gen3_enable()
9898 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16); cik_pcie_gen3_enable()
9906 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16); cik_pcie_gen3_enable()
9909 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16); cik_pcie_gen3_enable()
9923 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16); cik_pcie_gen3_enable()
9931 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16); cik_pcie_gen3_enable()
9937 for (i = 0; i < rdev->usec_timeout; i++) { cik_pcie_gen3_enable()
9945 static void cik_program_aspm(struct radeon_device *rdev) cik_program_aspm() argument
9955 if (rdev->flags & RADEON_IS_IGP) cik_program_aspm()
9958 if (!(rdev->flags & RADEON_IS_PCIE)) cik_program_aspm()
10023 !pci_is_root_bus(rdev->pdev->bus)) { cik_program_aspm()
10024 struct pci_dev *root = rdev->pdev->bus->self; cik_program_aspm()
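
The cik_pcie_gen3_enable() hits show the driver's link-retraining sequence: read a 16-bit link-control word with pci_read_config_word(), mask out a field, or in the new value, and write it back. Below is a small userspace model of that masked read-modify-write; the register is just a variable here, and the 0x000f speed field is an assumption for illustration, not the exact PCIe layout.

#include <stdint.h>
#include <stdio.h>

/*
 * Update only the bits selected by mask, leaving the rest intact,
 * like the PCI_EXP_LNKCTL2 update in cik_pcie_gen3_enable().
 */
static uint16_t rmw16(uint16_t reg, uint16_t mask, uint16_t val)
{
        reg &= ~mask;           /* clear the field */
        reg |= (val & mask);    /* set the new field bits */
        return reg;
}

int main(void)
{
        uint16_t lnkctl2 = 0x1041;              /* made-up initial value */

        lnkctl2 = rmw16(lnkctl2, 0x000f, 3);    /* request gen3 speed */
        printf("0x%04x\n", lnkctl2);            /* low nibble is now 3 */
        return 0;
}
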
H A Dtrinity_smc.c
30 static int trinity_notify_message_to_smu(struct radeon_device *rdev, u32 id) trinity_notify_message_to_smu() argument
36 for (i = 0; i < rdev->usec_timeout; i++) { trinity_notify_message_to_smu()
56 int trinity_dpm_bapm_enable(struct radeon_device *rdev, bool enable) trinity_dpm_bapm_enable() argument
59 return trinity_notify_message_to_smu(rdev, PPSMC_MSG_EnableBAPM); trinity_dpm_bapm_enable()
61 return trinity_notify_message_to_smu(rdev, PPSMC_MSG_DisableBAPM); trinity_dpm_bapm_enable()
64 int trinity_dpm_config(struct radeon_device *rdev, bool enable) trinity_dpm_config() argument
71 return trinity_notify_message_to_smu(rdev, PPSMC_MSG_DPM_Config); trinity_dpm_config()
74 int trinity_dpm_force_state(struct radeon_device *rdev, u32 n) trinity_dpm_force_state() argument
78 return trinity_notify_message_to_smu(rdev, PPSMC_MSG_DPM_ForceState); trinity_dpm_force_state()
81 int trinity_dpm_n_levels_disabled(struct radeon_device *rdev, u32 n) trinity_dpm_n_levels_disabled() argument
85 return trinity_notify_message_to_smu(rdev, PPSMC_MSG_DPM_N_LevelsDisabled); trinity_dpm_n_levels_disabled()
88 int trinity_uvd_dpm_config(struct radeon_device *rdev) trinity_uvd_dpm_config() argument
90 return trinity_notify_message_to_smu(rdev, PPSMC_MSG_UVD_DPM_Config); trinity_uvd_dpm_config()
93 int trinity_dpm_no_forced_level(struct radeon_device *rdev) trinity_dpm_no_forced_level() argument
95 return trinity_notify_message_to_smu(rdev, PPSMC_MSG_NoForcedLevel); trinity_dpm_no_forced_level()
98 int trinity_dce_enable_voltage_adjustment(struct radeon_device *rdev, trinity_dce_enable_voltage_adjustment() argument
102 return trinity_notify_message_to_smu(rdev, PPSMC_MSG_DCE_AllowVoltageAdjustment); trinity_dce_enable_voltage_adjustment()
104 return trinity_notify_message_to_smu(rdev, PPSMC_MSG_DCE_RemoveVoltageAdjustment); trinity_dce_enable_voltage_adjustment()
107 int trinity_gfx_dynamic_mgpg_config(struct radeon_device *rdev) trinity_gfx_dynamic_mgpg_config() argument
109 return trinity_notify_message_to_smu(rdev, PPSMC_MSG_PG_SIMD_Config); trinity_gfx_dynamic_mgpg_config()
112 void trinity_acquire_mutex(struct radeon_device *rdev) trinity_acquire_mutex() argument
117 for (i = 0; i < rdev->usec_timeout; i++) { trinity_acquire_mutex()
124 void trinity_release_mutex(struct radeon_device *rdev) trinity_release_mutex() argument
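
trinity_notify_message_to_smu() posts a message id to the SMU and then spins for up to rdev->usec_timeout iterations waiting for an acknowledge, the same bounded-poll idiom seen throughout these files. A self-contained sketch, with a fake smu_ack() predicate in place of the real status-register read:

#include <stdbool.h>
#include <stdio.h>

#define USEC_TIMEOUT 100000     /* stand-in for rdev->usec_timeout */

/* Fake acknowledge: pretend the firmware responds on iteration 42. */
static bool smu_ack(int i)
{
        return i >= 42;
}

/* Poll a condition with an iteration bound: 0 on success, -1 on timeout. */
static int wait_for_smu(void)
{
        int i;

        for (i = 0; i < USEC_TIMEOUT; i++) {
                if (smu_ack(i))
                        return 0;
                /* the kernel would udelay(1) between reads here */
        }
        return -1;
}

int main(void)
{
        printf("smu: %s\n", wait_for_smu() ? "timeout" : "ok");
        return 0;
}
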
H A Dradeon_fence.c
56 * @rdev: radeon_device pointer
62 static void radeon_fence_write(struct radeon_device *rdev, u32 seq, int ring) radeon_fence_write() argument
64 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; radeon_fence_write()
65 if (likely(rdev->wb.enabled || !drv->scratch_reg)) { radeon_fence_write()
77 * @rdev: radeon_device pointer
83 static u32 radeon_fence_read(struct radeon_device *rdev, int ring) radeon_fence_read() argument
85 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; radeon_fence_read()
88 if (likely(rdev->wb.enabled || !drv->scratch_reg)) { radeon_fence_read()
103 * @rdev: radeon_device pointer
108 static void radeon_fence_schedule_check(struct radeon_device *rdev, int ring) radeon_fence_schedule_check() argument
115 &rdev->fence_drv[ring].lockup_work, radeon_fence_schedule_check()
122 * @rdev: radeon_device pointer
129 int radeon_fence_emit(struct radeon_device *rdev, radeon_fence_emit() argument
133 u64 seq = ++rdev->fence_drv[ring].sync_seq[ring]; radeon_fence_emit()
140 (*fence)->rdev = rdev; radeon_fence_emit()
145 &rdev->fence_queue.lock, rdev->fence_context + ring, seq); radeon_fence_emit()
146 radeon_fence_ring_emit(rdev, ring, *fence); radeon_fence_emit()
147 trace_radeon_fence_emit(rdev->ddev, ring, (*fence)->seq); radeon_fence_emit()
148 radeon_fence_schedule_check(rdev, ring); radeon_fence_emit()
170 seq = atomic64_read(&fence->rdev->fence_drv[fence->ring].last_seq); radeon_fence_check_signaled()
179 radeon_irq_kms_sw_irq_put(fence->rdev, fence->ring); radeon_fence_check_signaled()
180 __remove_wait_queue(&fence->rdev->fence_queue, &fence->fence_wake); radeon_fence_check_signaled()
190 * @rdev: radeon_device pointer
197 static bool radeon_fence_activity(struct radeon_device *rdev, int ring) radeon_fence_activity() argument
224 last_seq = atomic64_read(&rdev->fence_drv[ring].last_seq); radeon_fence_activity()
226 last_emitted = rdev->fence_drv[ring].sync_seq[ring]; radeon_fence_activity()
227 seq = radeon_fence_read(rdev, ring); radeon_fence_activity()
251 } while (atomic64_xchg(&rdev->fence_drv[ring].last_seq, seq) > seq); radeon_fence_activity()
254 radeon_fence_schedule_check(rdev, ring); radeon_fence_activity()
270 struct radeon_device *rdev; radeon_fence_check_lockup() local
275 rdev = fence_drv->rdev; radeon_fence_check_lockup()
276 ring = fence_drv - &rdev->fence_drv[0]; radeon_fence_check_lockup()
278 if (!down_read_trylock(&rdev->exclusive_lock)) { radeon_fence_check_lockup()
280 radeon_fence_schedule_check(rdev, ring); radeon_fence_check_lockup()
284 if (fence_drv->delayed_irq && rdev->ddev->irq_enabled) { radeon_fence_check_lockup()
288 spin_lock_irqsave(&rdev->irq.lock, irqflags); radeon_fence_check_lockup()
289 radeon_irq_set(rdev); radeon_fence_check_lockup()
290 spin_unlock_irqrestore(&rdev->irq.lock, irqflags); radeon_fence_check_lockup()
293 if (radeon_fence_activity(rdev, ring)) radeon_fence_check_lockup()
294 wake_up_all(&rdev->fence_queue); radeon_fence_check_lockup()
296 else if (radeon_ring_is_lockup(rdev, ring, &rdev->ring[ring])) { radeon_fence_check_lockup()
299 dev_warn(rdev->dev, "GPU lockup (current fence id " radeon_fence_check_lockup()
305 rdev->needs_reset = true; radeon_fence_check_lockup()
306 wake_up_all(&rdev->fence_queue); radeon_fence_check_lockup()
308 up_read(&rdev->exclusive_lock); radeon_fence_check_lockup()
314 * @rdev: radeon_device pointer
320 void radeon_fence_process(struct radeon_device *rdev, int ring) radeon_fence_process() argument
322 if (radeon_fence_activity(rdev, ring)) radeon_fence_process()
323 wake_up_all(&rdev->fence_queue); radeon_fence_process()
329 * @rdev: radeon device pointer
340 static bool radeon_fence_seq_signaled(struct radeon_device *rdev, radeon_fence_seq_signaled() argument
343 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) { radeon_fence_seq_signaled()
347 radeon_fence_process(rdev, ring); radeon_fence_seq_signaled()
348 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) { radeon_fence_seq_signaled()
357 struct radeon_device *rdev = fence->rdev; radeon_fence_is_signaled() local
361 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) { radeon_fence_is_signaled()
365 if (down_read_trylock(&rdev->exclusive_lock)) { radeon_fence_is_signaled()
366 radeon_fence_process(rdev, ring); radeon_fence_is_signaled()
367 up_read(&rdev->exclusive_lock); radeon_fence_is_signaled()
369 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) { radeon_fence_is_signaled()
387 struct radeon_device *rdev = fence->rdev; radeon_fence_enable_signaling() local
389 if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq) radeon_fence_enable_signaling()
392 if (down_read_trylock(&rdev->exclusive_lock)) { radeon_fence_enable_signaling()
393 radeon_irq_kms_sw_irq_get(rdev, fence->ring); radeon_fence_enable_signaling()
395 if (radeon_fence_activity(rdev, fence->ring)) radeon_fence_enable_signaling()
396 wake_up_all_locked(&rdev->fence_queue); radeon_fence_enable_signaling()
399 if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq) { radeon_fence_enable_signaling()
400 radeon_irq_kms_sw_irq_put(rdev, fence->ring); radeon_fence_enable_signaling()
401 up_read(&rdev->exclusive_lock); radeon_fence_enable_signaling()
405 up_read(&rdev->exclusive_lock); radeon_fence_enable_signaling()
408 if (radeon_irq_kms_sw_irq_get_delayed(rdev, fence->ring)) radeon_fence_enable_signaling()
409 rdev->fence_drv[fence->ring].delayed_irq = true; radeon_fence_enable_signaling()
410 radeon_fence_schedule_check(rdev, fence->ring); radeon_fence_enable_signaling()
416 __add_wait_queue(&rdev->fence_queue, &fence->fence_wake); radeon_fence_enable_signaling()
436 if (radeon_fence_seq_signaled(fence->rdev, fence->seq, fence->ring)) { radeon_fence_signaled()
450 * @rdev: radeon device pointer
458 static bool radeon_fence_any_seq_signaled(struct radeon_device *rdev, u64 *seq) radeon_fence_any_seq_signaled() argument
463 if (seq[i] && radeon_fence_seq_signaled(rdev, seq[i], i)) radeon_fence_any_seq_signaled()
472 * @rdev: radeon device pointer
486 static long radeon_fence_wait_seq_timeout(struct radeon_device *rdev, radeon_fence_wait_seq_timeout() argument
493 if (radeon_fence_any_seq_signaled(rdev, target_seq)) radeon_fence_wait_seq_timeout()
501 trace_radeon_fence_wait_begin(rdev->ddev, i, target_seq[i]); radeon_fence_wait_seq_timeout()
502 radeon_irq_kms_sw_irq_get(rdev, i); radeon_fence_wait_seq_timeout()
506 r = wait_event_interruptible_timeout(rdev->fence_queue, ( radeon_fence_wait_seq_timeout()
507 radeon_fence_any_seq_signaled(rdev, target_seq) radeon_fence_wait_seq_timeout()
508 || rdev->needs_reset), timeout); radeon_fence_wait_seq_timeout()
510 r = wait_event_timeout(rdev->fence_queue, ( radeon_fence_wait_seq_timeout()
511 radeon_fence_any_seq_signaled(rdev, target_seq) radeon_fence_wait_seq_timeout()
512 || rdev->needs_reset), timeout); radeon_fence_wait_seq_timeout()
515 if (rdev->needs_reset) radeon_fence_wait_seq_timeout()
522 radeon_irq_kms_sw_irq_put(rdev, i); radeon_fence_wait_seq_timeout()
523 trace_radeon_fence_wait_end(rdev->ddev, i, target_seq[i]); radeon_fence_wait_seq_timeout()
555 r = radeon_fence_wait_seq_timeout(fence->rdev, seq, intr, MAX_SCHEDULE_TIMEOUT); radeon_fence_wait()
569 * @rdev: radeon device pointer
579 int radeon_fence_wait_any(struct radeon_device *rdev, radeon_fence_wait_any() argument
602 r = radeon_fence_wait_seq_timeout(rdev, seq, intr, MAX_SCHEDULE_TIMEOUT); radeon_fence_wait_any()
612 * @rdev: radeon device pointer
619 int radeon_fence_wait_next(struct radeon_device *rdev, int ring) radeon_fence_wait_next() argument
624 seq[ring] = atomic64_read(&rdev->fence_drv[ring].last_seq) + 1ULL; radeon_fence_wait_next()
625 if (seq[ring] >= rdev->fence_drv[ring].sync_seq[ring]) { radeon_fence_wait_next()
630 r = radeon_fence_wait_seq_timeout(rdev, seq, false, MAX_SCHEDULE_TIMEOUT); radeon_fence_wait_next()
639 * @rdev: radeon device pointer
646 int radeon_fence_wait_empty(struct radeon_device *rdev, int ring) radeon_fence_wait_empty() argument
651 seq[ring] = rdev->fence_drv[ring].sync_seq[ring]; radeon_fence_wait_empty()
655 r = radeon_fence_wait_seq_timeout(rdev, seq, false, MAX_SCHEDULE_TIMEOUT); radeon_fence_wait_empty()
660 dev_err(rdev->dev, "error waiting for ring[%d] to become idle (%ld)\n", radeon_fence_wait_empty()
700 * @rdev: radeon device pointer
707 unsigned radeon_fence_count_emitted(struct radeon_device *rdev, int ring) radeon_fence_count_emitted() argument
714 radeon_fence_process(rdev, ring); radeon_fence_count_emitted()
715 emitted = rdev->fence_drv[ring].sync_seq[ring] radeon_fence_count_emitted()
716 - atomic64_read(&rdev->fence_drv[ring].last_seq); radeon_fence_count_emitted()
748 fdrv = &fence->rdev->fence_drv[dst_ring]; radeon_fence_need_sync()
779 src = &fence->rdev->fence_drv[fence->ring]; radeon_fence_note_sync()
780 dst = &fence->rdev->fence_drv[dst_ring]; radeon_fence_note_sync()
793 * @rdev: radeon device pointer
801 int radeon_fence_driver_start_ring(struct radeon_device *rdev, int ring) radeon_fence_driver_start_ring() argument
806 radeon_scratch_free(rdev, rdev->fence_drv[ring].scratch_reg); radeon_fence_driver_start_ring()
807 if (rdev->wb.use_event || !radeon_ring_supports_scratch_reg(rdev, &rdev->ring[ring])) { radeon_fence_driver_start_ring()
808 rdev->fence_drv[ring].scratch_reg = 0; radeon_fence_driver_start_ring()
811 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4]; radeon_fence_driver_start_ring()
812 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + radeon_fence_driver_start_ring()
817 index = ALIGN(rdev->uvd_fw->size, 8); radeon_fence_driver_start_ring()
818 rdev->fence_drv[ring].cpu_addr = rdev->uvd.cpu_addr + index; radeon_fence_driver_start_ring()
819 rdev->fence_drv[ring].gpu_addr = rdev->uvd.gpu_addr + index; radeon_fence_driver_start_ring()
823 r = radeon_scratch_get(rdev, &rdev->fence_drv[ring].scratch_reg); radeon_fence_driver_start_ring()
825 dev_err(rdev->dev, "fence failed to get scratch register\n"); radeon_fence_driver_start_ring()
829 rdev->fence_drv[ring].scratch_reg - radeon_fence_driver_start_ring()
830 rdev->scratch.reg_base; radeon_fence_driver_start_ring()
831 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4]; radeon_fence_driver_start_ring()
832 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index; radeon_fence_driver_start_ring()
834 radeon_fence_write(rdev, atomic64_read(&rdev->fence_drv[ring].last_seq), ring); radeon_fence_driver_start_ring()
835 rdev->fence_drv[ring].initialized = true; radeon_fence_driver_start_ring()
836 dev_info(rdev->dev, "fence driver on ring %d use gpu addr 0x%016llx and cpu addr 0x%p\n", radeon_fence_driver_start_ring()
837 ring, rdev->fence_drv[ring].gpu_addr, rdev->fence_drv[ring].cpu_addr); radeon_fence_driver_start_ring()
845 * @rdev: radeon device pointer
851 static void radeon_fence_driver_init_ring(struct radeon_device *rdev, int ring) radeon_fence_driver_init_ring() argument
855 rdev->fence_drv[ring].scratch_reg = -1; radeon_fence_driver_init_ring()
856 rdev->fence_drv[ring].cpu_addr = NULL; radeon_fence_driver_init_ring()
857 rdev->fence_drv[ring].gpu_addr = 0; radeon_fence_driver_init_ring()
859 rdev->fence_drv[ring].sync_seq[i] = 0; radeon_fence_driver_init_ring()
860 atomic64_set(&rdev->fence_drv[ring].last_seq, 0); radeon_fence_driver_init_ring()
861 rdev->fence_drv[ring].initialized = false; radeon_fence_driver_init_ring()
862 INIT_DELAYED_WORK(&rdev->fence_drv[ring].lockup_work, radeon_fence_driver_init_ring()
864 rdev->fence_drv[ring].rdev = rdev; radeon_fence_driver_init_ring()
871 * @rdev: radeon device pointer
879 int radeon_fence_driver_init(struct radeon_device *rdev) radeon_fence_driver_init() argument
883 init_waitqueue_head(&rdev->fence_queue); radeon_fence_driver_init()
885 radeon_fence_driver_init_ring(rdev, ring); radeon_fence_driver_init()
887 if (radeon_debugfs_fence_init(rdev)) { radeon_fence_driver_init()
888 dev_err(rdev->dev, "fence debugfs file creation failed\n"); radeon_fence_driver_init()
897 * @rdev: radeon device pointer
901 void radeon_fence_driver_fini(struct radeon_device *rdev) radeon_fence_driver_fini() argument
905 mutex_lock(&rdev->ring_lock); radeon_fence_driver_fini()
907 if (!rdev->fence_drv[ring].initialized) radeon_fence_driver_fini()
909 r = radeon_fence_wait_empty(rdev, ring); radeon_fence_driver_fini()
912 radeon_fence_driver_force_completion(rdev, ring); radeon_fence_driver_fini()
914 cancel_delayed_work_sync(&rdev->fence_drv[ring].lockup_work); radeon_fence_driver_fini()
915 wake_up_all(&rdev->fence_queue); radeon_fence_driver_fini()
916 radeon_scratch_free(rdev, rdev->fence_drv[ring].scratch_reg); radeon_fence_driver_fini()
917 rdev->fence_drv[ring].initialized = false; radeon_fence_driver_fini()
919 mutex_unlock(&rdev->ring_lock); radeon_fence_driver_fini()
925 * @rdev: radeon device pointer
931 void radeon_fence_driver_force_completion(struct radeon_device *rdev, int ring) radeon_fence_driver_force_completion() argument
933 if (rdev->fence_drv[ring].initialized) { radeon_fence_driver_force_completion()
934 radeon_fence_write(rdev, rdev->fence_drv[ring].sync_seq[ring], ring); radeon_fence_driver_force_completion()
935 cancel_delayed_work_sync(&rdev->fence_drv[ring].lockup_work); radeon_fence_driver_force_completion()
948 struct radeon_device *rdev = dev->dev_private; radeon_debugfs_fence_info() local
952 if (!rdev->fence_drv[i].initialized) radeon_debugfs_fence_info()
955 radeon_fence_process(rdev, i); radeon_debugfs_fence_info()
959 (unsigned long long)atomic64_read(&rdev->fence_drv[i].last_seq)); radeon_debugfs_fence_info()
961 rdev->fence_drv[i].sync_seq[i]); radeon_debugfs_fence_info()
964 if (i != j && rdev->fence_drv[j].initialized) radeon_debugfs_fence_info()
966 j, rdev->fence_drv[i].sync_seq[j]); radeon_debugfs_fence_info()
981 struct radeon_device *rdev = dev->dev_private; radeon_debugfs_gpu_reset() local
983 down_read(&rdev->exclusive_lock); radeon_debugfs_gpu_reset()
984 seq_printf(m, "%d\n", rdev->needs_reset); radeon_debugfs_gpu_reset()
985 rdev->needs_reset = true; radeon_debugfs_gpu_reset()
986 wake_up_all(&rdev->fence_queue); radeon_debugfs_gpu_reset()
987 up_read(&rdev->exclusive_lock); radeon_debugfs_gpu_reset()
998 int radeon_debugfs_fence_init(struct radeon_device *rdev) radeon_debugfs_fence_init() argument
1001 return radeon_debugfs_add_files(rdev, radeon_debugfs_fence_list, 2); radeon_debugfs_fence_init()
1051 struct radeon_device *rdev = fence->rdev; radeon_fence_default_wait() local
1072 if (rdev->needs_reset) { radeon_fence_default_wait()
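
The radeon_fence.c hits reduce to one idea: each ring has a monotonically increasing 64-bit sequence, radeon_fence_emit() hands out the next number (++sync_seq), and a fence is signaled once the ring's last_seq has caught up (radeon_fence_seq_signaled). A minimal model of that check using C11 atomics; it assumes a single ring and skips the 32-bit hardware-readback reconstruction done by radeon_fence_activity():

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static uint64_t sync_seq;               /* last sequence emitted */
static _Atomic uint64_t last_seq;       /* last sequence retired */

static uint64_t fence_emit(void)
{
        return ++sync_seq;              /* like ++fence_drv[ring].sync_seq */
}

static void gpu_retire(uint64_t seq)
{
        atomic_store(&last_seq, seq);   /* normally driven by the GPU */
}

/* Mirrors radeon_fence_seq_signaled(): done once last_seq >= seq. */
static bool fence_signaled(uint64_t seq)
{
        return atomic_load(&last_seq) >= seq;
}

int main(void)
{
        uint64_t f1 = fence_emit();
        uint64_t f2 = fence_emit();

        gpu_retire(f1);
        printf("f1 signaled: %d, f2 signaled: %d\n",
               fence_signaled(f1), fence_signaled(f2));
        return 0;
}
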
H A Dradeon_vce.c
52 * @rdev: radeon_device pointer
56 int radeon_vce_init(struct radeon_device *rdev) radeon_vce_init() argument
65 INIT_DELAYED_WORK(&rdev->vce.idle_work, radeon_vce_idle_work_handler); radeon_vce_init()
67 switch (rdev->family) { radeon_vce_init()
88 r = request_firmware(&rdev->vce_fw, fw_name, rdev->dev); radeon_vce_init()
90 dev_err(rdev->dev, "radeon_vce: Can't load firmware \"%s\"\n", radeon_vce_init()
97 size = rdev->vce_fw->size - strlen(fw_version) - 9; radeon_vce_init()
98 c = rdev->vce_fw->data; radeon_vce_init()
112 size = rdev->vce_fw->size - strlen(fb_version) - 3; radeon_vce_init()
113 c = rdev->vce_fw->data; radeon_vce_init()
122 if (sscanf(c, "%2u]", &rdev->vce.fb_version) != 1) radeon_vce_init()
126 start, mid, end, rdev->vce.fb_version); radeon_vce_init()
128 rdev->vce.fw_version = (start << 24) | (mid << 16) | (end << 8); radeon_vce_init()
131 if ((rdev->vce.fw_version != ((40 << 24) | (2 << 16) | (2 << 8))) && radeon_vce_init()
132 (rdev->vce.fw_version != ((50 << 24) | (0 << 16) | (1 << 8))) && radeon_vce_init()
133 (rdev->vce.fw_version != ((50 << 24) | (1 << 16) | (2 << 8)))) radeon_vce_init()
138 if (rdev->family < CHIP_BONAIRE) radeon_vce_init()
139 size = vce_v1_0_bo_size(rdev); radeon_vce_init()
141 size = vce_v2_0_bo_size(rdev); radeon_vce_init()
142 r = radeon_bo_create(rdev, size, PAGE_SIZE, true, radeon_vce_init()
144 &rdev->vce.vcpu_bo); radeon_vce_init()
146 dev_err(rdev->dev, "(%d) failed to allocate VCE bo\n", r); radeon_vce_init()
150 r = radeon_bo_reserve(rdev->vce.vcpu_bo, false); radeon_vce_init()
152 radeon_bo_unref(&rdev->vce.vcpu_bo); radeon_vce_init()
153 dev_err(rdev->dev, "(%d) failed to reserve VCE bo\n", r); radeon_vce_init()
157 r = radeon_bo_pin(rdev->vce.vcpu_bo, RADEON_GEM_DOMAIN_VRAM, radeon_vce_init()
158 &rdev->vce.gpu_addr); radeon_vce_init()
159 radeon_bo_unreserve(rdev->vce.vcpu_bo); radeon_vce_init()
161 radeon_bo_unref(&rdev->vce.vcpu_bo); radeon_vce_init()
162 dev_err(rdev->dev, "(%d) VCE bo pin failed\n", r); radeon_vce_init()
167 atomic_set(&rdev->vce.handles[i], 0); radeon_vce_init()
168 rdev->vce.filp[i] = NULL; radeon_vce_init()
177 * @rdev: radeon_device pointer
181 void radeon_vce_fini(struct radeon_device *rdev) radeon_vce_fini() argument
183 if (rdev->vce.vcpu_bo == NULL) radeon_vce_fini()
186 radeon_bo_unref(&rdev->vce.vcpu_bo); radeon_vce_fini()
188 release_firmware(rdev->vce_fw); radeon_vce_fini()
194 * @rdev: radeon_device pointer
197 int radeon_vce_suspend(struct radeon_device *rdev) radeon_vce_suspend() argument
201 if (rdev->vce.vcpu_bo == NULL) radeon_vce_suspend()
205 if (atomic_read(&rdev->vce.handles[i])) radeon_vce_suspend()
218 * @rdev: radeon_device pointer
221 int radeon_vce_resume(struct radeon_device *rdev) radeon_vce_resume() argument
226 if (rdev->vce.vcpu_bo == NULL) radeon_vce_resume()
229 r = radeon_bo_reserve(rdev->vce.vcpu_bo, false); radeon_vce_resume()
231 dev_err(rdev->dev, "(%d) failed to reserve VCE bo\n", r); radeon_vce_resume()
235 r = radeon_bo_kmap(rdev->vce.vcpu_bo, &cpu_addr); radeon_vce_resume()
237 radeon_bo_unreserve(rdev->vce.vcpu_bo); radeon_vce_resume()
238 dev_err(rdev->dev, "(%d) VCE map failed\n", r); radeon_vce_resume()
242 memset(cpu_addr, 0, radeon_bo_size(rdev->vce.vcpu_bo)); radeon_vce_resume()
243 if (rdev->family < CHIP_BONAIRE) radeon_vce_resume()
244 r = vce_v1_0_load_fw(rdev, cpu_addr); radeon_vce_resume()
246 memcpy(cpu_addr, rdev->vce_fw->data, rdev->vce_fw->size); radeon_vce_resume()
248 radeon_bo_kunmap(rdev->vce.vcpu_bo); radeon_vce_resume()
250 radeon_bo_unreserve(rdev->vce.vcpu_bo); radeon_vce_resume()
264 struct radeon_device *rdev = radeon_vce_idle_work_handler() local
267 if ((radeon_fence_count_emitted(rdev, TN_RING_TYPE_VCE1_INDEX) == 0) && radeon_vce_idle_work_handler()
268 (radeon_fence_count_emitted(rdev, TN_RING_TYPE_VCE2_INDEX) == 0)) { radeon_vce_idle_work_handler()
269 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { radeon_vce_idle_work_handler()
270 radeon_dpm_enable_vce(rdev, false); radeon_vce_idle_work_handler()
272 radeon_set_vce_clocks(rdev, 0, 0); radeon_vce_idle_work_handler()
275 schedule_delayed_work(&rdev->vce.idle_work, radeon_vce_idle_work_handler()
283 * @rdev: radeon_device pointer
287 void radeon_vce_note_usage(struct radeon_device *rdev) radeon_vce_note_usage() argument
290 bool set_clocks = !cancel_delayed_work_sync(&rdev->vce.idle_work); radeon_vce_note_usage()
291 set_clocks &= schedule_delayed_work(&rdev->vce.idle_work, radeon_vce_note_usage()
294 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { radeon_vce_note_usage()
300 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { radeon_vce_note_usage()
301 radeon_dpm_enable_vce(rdev, true); radeon_vce_note_usage()
303 radeon_set_vce_clocks(rdev, 53300, 40000); radeon_vce_note_usage()
311 * @rdev: radeon_device pointer
316 void radeon_vce_free_handles(struct radeon_device *rdev, struct drm_file *filp) radeon_vce_free_handles() argument
320 uint32_t handle = atomic_read(&rdev->vce.handles[i]); radeon_vce_free_handles()
321 if (!handle || rdev->vce.filp[i] != filp) radeon_vce_free_handles()
324 radeon_vce_note_usage(rdev); radeon_vce_free_handles()
326 r = radeon_vce_get_destroy_msg(rdev, TN_RING_TYPE_VCE1_INDEX, radeon_vce_free_handles()
331 rdev->vce.filp[i] = NULL; radeon_vce_free_handles()
332 atomic_set(&rdev->vce.handles[i], 0); radeon_vce_free_handles()
339 * @rdev: radeon_device pointer
346 int radeon_vce_get_create_msg(struct radeon_device *rdev, int ring, radeon_vce_get_create_msg() argument
354 r = radeon_ib_get(rdev, ring, &ib, NULL, ib_size_dw * 4); radeon_vce_get_create_msg()
390 r = radeon_ib_schedule(rdev, &ib, NULL, false); radeon_vce_get_create_msg()
398 radeon_ib_free(rdev, &ib); radeon_vce_get_create_msg()
406 * @rdev: radeon_device pointer
413 int radeon_vce_get_destroy_msg(struct radeon_device *rdev, int ring, radeon_vce_get_destroy_msg() argument
421 r = radeon_ib_get(rdev, ring, &ib, NULL, ib_size_dw * 4); radeon_vce_get_destroy_msg()
447 r = radeon_ib_schedule(rdev, &ib, NULL, false); radeon_vce_get_destroy_msg()
455 radeon_ib_free(rdev, &ib); radeon_vce_get_destroy_msg()
528 if (atomic_read(&p->rdev->vce.handles[i]) == handle) { radeon_vce_validate_handle()
529 if (p->rdev->vce.filp[i] != p->filp) { radeon_vce_validate_handle()
539 if (!atomic_cmpxchg(&p->rdev->vce.handles[i], 0, handle)) { radeon_vce_validate_handle()
540 p->rdev->vce.filp[i] = p->filp; radeon_vce_validate_handle()
541 p->rdev->vce.img_size[i] = 0; radeon_vce_validate_handle()
588 size = &p->rdev->vce.img_size[session_idx]; radeon_vce_cs_parse()
680 atomic_cmpxchg(&p->rdev->vce.handles[i], handle, 0); radeon_vce_cs_parse()
689 * @rdev: radeon_device pointer
695 bool radeon_vce_semaphore_emit(struct radeon_device *rdev, radeon_vce_semaphore_emit() argument
715 * @rdev: radeon_device pointer
719 void radeon_vce_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) radeon_vce_ib_execute() argument
721 struct radeon_ring *ring = &rdev->ring[ib->ring]; radeon_vce_ib_execute()
731 * @rdev: radeon_device pointer
735 void radeon_vce_fence_emit(struct radeon_device *rdev, radeon_vce_fence_emit() argument
738 struct radeon_ring *ring = &rdev->ring[fence->ring]; radeon_vce_fence_emit()
739 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; radeon_vce_fence_emit()
752 * @rdev: radeon_device pointer
756 int radeon_vce_ring_test(struct radeon_device *rdev, struct radeon_ring *ring) radeon_vce_ring_test() argument
758 uint32_t rptr = vce_v1_0_get_rptr(rdev, ring); radeon_vce_ring_test()
762 r = radeon_ring_lock(rdev, ring, 16); radeon_vce_ring_test()
769 radeon_ring_unlock_commit(rdev, ring, false); radeon_vce_ring_test()
771 for (i = 0; i < rdev->usec_timeout; i++) { radeon_vce_ring_test()
772 if (vce_v1_0_get_rptr(rdev, ring) != rptr) radeon_vce_ring_test()
777 if (i < rdev->usec_timeout) { radeon_vce_ring_test()
792 * @rdev: radeon_device pointer
796 int radeon_vce_ib_test(struct radeon_device *rdev, struct radeon_ring *ring) radeon_vce_ib_test() argument
801 r = radeon_vce_get_create_msg(rdev, ring->idx, 1, NULL); radeon_vce_ib_test()
807 r = radeon_vce_get_destroy_msg(rdev, ring->idx, 1, &fence); radeon_vce_ib_test()
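
radeon_vce_init() parses the firmware banner into start/mid/end numbers and packs them as (start << 24) | (mid << 16) | (end << 8), so a whole-version check becomes one integer compare. A small sketch of that packing:

#include <stdint.h>
#include <stdio.h>

/* Same layout as rdev->vce.fw_version in the listing above. */
static uint32_t vce_pack(unsigned start, unsigned mid, unsigned end)
{
        return ((uint32_t)start << 24) |
               ((uint32_t)mid   << 16) |
               ((uint32_t)end   <<  8);
}

int main(void)
{
        uint32_t v = vce_pack(50, 0, 1);

        /* matches the ((50 << 24) | (0 << 16) | (1 << 8)) check above */
        printf("0x%08x %s\n", v,
               v == vce_pack(50, 0, 1) ? "supported" : "unsupported");
        return 0;
}
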
H A Dradeon_ib.c
41 static int radeon_debugfs_sa_init(struct radeon_device *rdev);
46 * @rdev: radeon_device pointer
55 int radeon_ib_get(struct radeon_device *rdev, int ring, radeon_ib_get() argument
61 r = radeon_sa_bo_new(rdev, &rdev->ring_tmp_bo, &ib->sa_bo, size, 256); radeon_ib_get()
63 dev_err(rdev->dev, "failed to get a new IB (%d)\n", r); radeon_ib_get()
89 * @rdev: radeon_device pointer
94 void radeon_ib_free(struct radeon_device *rdev, struct radeon_ib *ib) radeon_ib_free() argument
96 radeon_sync_free(rdev, &ib->sync, ib->fence); radeon_ib_free()
97 radeon_sa_bo_free(rdev, &ib->sa_bo, ib->fence); radeon_ib_free()
104 * @rdev: radeon_device pointer
122 int radeon_ib_schedule(struct radeon_device *rdev, struct radeon_ib *ib, radeon_ib_schedule() argument
125 struct radeon_ring *ring = &rdev->ring[ib->ring]; radeon_ib_schedule()
130 dev_err(rdev->dev, "couldn't schedule ib\n"); radeon_ib_schedule()
135 r = radeon_ring_lock(rdev, ring, 64 + RADEON_NUM_SYNCS * 8); radeon_ib_schedule()
137 dev_err(rdev->dev, "scheduling IB failed (%d).\n", r); radeon_ib_schedule()
144 vm_id_fence = radeon_vm_grab_id(rdev, ib->vm, ib->ring); radeon_ib_schedule()
149 r = radeon_sync_rings(rdev, &ib->sync, ib->ring); radeon_ib_schedule()
151 dev_err(rdev->dev, "failed to sync rings (%d)\n", r); radeon_ib_schedule()
152 radeon_ring_unlock_undo(rdev, ring); radeon_ib_schedule()
157 radeon_vm_flush(rdev, ib->vm, ib->ring, radeon_ib_schedule()
161 radeon_ring_ib_execute(rdev, const_ib->ring, const_ib); radeon_ib_schedule()
162 radeon_sync_free(rdev, &const_ib->sync, NULL); radeon_ib_schedule()
164 radeon_ring_ib_execute(rdev, ib->ring, ib); radeon_ib_schedule()
165 r = radeon_fence_emit(rdev, &ib->fence, ib->ring); radeon_ib_schedule()
167 dev_err(rdev->dev, "failed to emit fence for new IB (%d)\n", r); radeon_ib_schedule()
168 radeon_ring_unlock_undo(rdev, ring); radeon_ib_schedule()
176 radeon_vm_fence(rdev, ib->vm, ib->fence); radeon_ib_schedule()
178 radeon_ring_unlock_commit(rdev, ring, hdp_flush); radeon_ib_schedule()
185 * @rdev: radeon_device pointer
191 int radeon_ib_pool_init(struct radeon_device *rdev) radeon_ib_pool_init() argument
195 if (rdev->ib_pool_ready) { radeon_ib_pool_init()
199 if (rdev->family >= CHIP_BONAIRE) { radeon_ib_pool_init()
200 r = radeon_sa_bo_manager_init(rdev, &rdev->ring_tmp_bo, radeon_ib_pool_init()
209 r = radeon_sa_bo_manager_init(rdev, &rdev->ring_tmp_bo, radeon_ib_pool_init()
218 r = radeon_sa_bo_manager_start(rdev, &rdev->ring_tmp_bo); radeon_ib_pool_init()
223 rdev->ib_pool_ready = true; radeon_ib_pool_init()
224 if (radeon_debugfs_sa_init(rdev)) { radeon_ib_pool_init()
225 dev_err(rdev->dev, "failed to register debugfs file for SA\n"); radeon_ib_pool_init()
233 * @rdev: radeon_device pointer
238 void radeon_ib_pool_fini(struct radeon_device *rdev) radeon_ib_pool_fini() argument
240 if (rdev->ib_pool_ready) { radeon_ib_pool_fini()
241 radeon_sa_bo_manager_suspend(rdev, &rdev->ring_tmp_bo); radeon_ib_pool_fini()
242 radeon_sa_bo_manager_fini(rdev, &rdev->ring_tmp_bo); radeon_ib_pool_fini()
243 rdev->ib_pool_ready = false; radeon_ib_pool_fini()
250 * @rdev: radeon_device pointer
257 int radeon_ib_ring_tests(struct radeon_device *rdev) radeon_ib_ring_tests() argument
263 struct radeon_ring *ring = &rdev->ring[i]; radeon_ib_ring_tests()
268 r = radeon_ib_test(rdev, i, ring); radeon_ib_ring_tests()
270 radeon_fence_driver_force_completion(rdev, i); radeon_ib_ring_tests()
272 rdev->needs_reset = false; radeon_ib_ring_tests()
277 rdev->accel_working = false; radeon_ib_ring_tests()
298 struct radeon_device *rdev = dev->dev_private; radeon_debugfs_sa_info() local
300 radeon_sa_bo_dump_debug_info(&rdev->ring_tmp_bo, m); radeon_debugfs_sa_info()
312 static int radeon_debugfs_sa_init(struct radeon_device *rdev) radeon_debugfs_sa_init() argument
315 return radeon_debugfs_add_files(rdev, radeon_debugfs_sa_list, 1); radeon_debugfs_sa_init()
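
The radeon_ib.c hits trace a strict lifecycle: radeon_ib_get() carves an IB out of the suballocator pool, the caller fills ib.ptr and length_dw, radeon_ib_schedule() emits it behind any required ring syncs, and radeon_ib_free() returns the memory against the attached fence. A toy userspace model of that flow; the types are hypothetical and "scheduling" just prints the queued packets:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Toy indirect buffer: a dword array plus a fill cursor. */
struct ib {
        uint32_t *ptr;
        unsigned length_dw;
};

static int ib_get(struct ib *ib, unsigned size_dw)
{
        ib->ptr = calloc(size_dw, sizeof(*ib->ptr));
        ib->length_dw = 0;
        return ib->ptr ? 0 : -1;
}

static void ib_free(struct ib *ib)
{
        free(ib->ptr);
        ib->ptr = NULL;
}

/* Stand-in for radeon_ib_schedule(): dump what the caller queued. */
static int ib_schedule(struct ib *ib)
{
        unsigned i;

        for (i = 0; i < ib->length_dw; i++)
                printf("dw[%u] = 0x%08x\n", i, ib->ptr[i]);
        return 0;
}

int main(void)
{
        struct ib ib;

        if (ib_get(&ib, 16))
                return 1;
        ib.ptr[ib.length_dw++] = 0xc0de0001;    /* caller-built packet */
        if (ib_schedule(&ib))
                fprintf(stderr, "schedule failed\n");
        ib_free(&ib);                           /* freed on all paths */
        return 0;
}
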
H A Dradeon_uvd.c
63 int radeon_uvd_init(struct radeon_device *rdev) radeon_uvd_init() argument
69 INIT_DELAYED_WORK(&rdev->uvd.idle_work, radeon_uvd_idle_work_handler); radeon_uvd_init()
71 switch (rdev->family) { radeon_uvd_init()
133 r = request_firmware(&rdev->uvd_fw, fw_name, rdev->dev); radeon_uvd_init()
135 dev_err(rdev->dev, "radeon_uvd: Can't load firmware \"%s\"\n", radeon_uvd_init()
140 bo_size = RADEON_GPU_PAGE_ALIGN(rdev->uvd_fw->size + 8) + radeon_uvd_init()
143 r = radeon_bo_create(rdev, bo_size, PAGE_SIZE, true, radeon_uvd_init()
145 NULL, &rdev->uvd.vcpu_bo); radeon_uvd_init()
147 dev_err(rdev->dev, "(%d) failed to allocate UVD bo\n", r); radeon_uvd_init()
151 r = radeon_bo_reserve(rdev->uvd.vcpu_bo, false); radeon_uvd_init()
153 radeon_bo_unref(&rdev->uvd.vcpu_bo); radeon_uvd_init()
154 dev_err(rdev->dev, "(%d) failed to reserve UVD bo\n", r); radeon_uvd_init()
158 r = radeon_bo_pin(rdev->uvd.vcpu_bo, RADEON_GEM_DOMAIN_VRAM, radeon_uvd_init()
159 &rdev->uvd.gpu_addr); radeon_uvd_init()
161 radeon_bo_unreserve(rdev->uvd.vcpu_bo); radeon_uvd_init()
162 radeon_bo_unref(&rdev->uvd.vcpu_bo); radeon_uvd_init()
163 dev_err(rdev->dev, "(%d) UVD bo pin failed\n", r); radeon_uvd_init()
167 r = radeon_bo_kmap(rdev->uvd.vcpu_bo, &rdev->uvd.cpu_addr); radeon_uvd_init()
169 dev_err(rdev->dev, "(%d) UVD map failed\n", r); radeon_uvd_init()
173 radeon_bo_unreserve(rdev->uvd.vcpu_bo); radeon_uvd_init()
176 atomic_set(&rdev->uvd.handles[i], 0); radeon_uvd_init()
177 rdev->uvd.filp[i] = NULL; radeon_uvd_init()
178 rdev->uvd.img_size[i] = 0; radeon_uvd_init()
184 void radeon_uvd_fini(struct radeon_device *rdev) radeon_uvd_fini() argument
188 if (rdev->uvd.vcpu_bo == NULL) radeon_uvd_fini()
191 r = radeon_bo_reserve(rdev->uvd.vcpu_bo, false); radeon_uvd_fini()
193 radeon_bo_kunmap(rdev->uvd.vcpu_bo); radeon_uvd_fini()
194 radeon_bo_unpin(rdev->uvd.vcpu_bo); radeon_uvd_fini()
195 radeon_bo_unreserve(rdev->uvd.vcpu_bo); radeon_uvd_fini()
198 radeon_bo_unref(&rdev->uvd.vcpu_bo); radeon_uvd_fini()
200 radeon_ring_fini(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX]); radeon_uvd_fini()
202 release_firmware(rdev->uvd_fw); radeon_uvd_fini()
205 int radeon_uvd_suspend(struct radeon_device *rdev) radeon_uvd_suspend() argument
209 if (rdev->uvd.vcpu_bo == NULL) radeon_uvd_suspend()
213 uint32_t handle = atomic_read(&rdev->uvd.handles[i]); radeon_uvd_suspend()
217 radeon_uvd_note_usage(rdev); radeon_uvd_suspend()
219 r = radeon_uvd_get_destroy_msg(rdev, radeon_uvd_suspend()
229 rdev->uvd.filp[i] = NULL; radeon_uvd_suspend()
230 atomic_set(&rdev->uvd.handles[i], 0); radeon_uvd_suspend()
237 int radeon_uvd_resume(struct radeon_device *rdev) radeon_uvd_resume() argument
242 if (rdev->uvd.vcpu_bo == NULL) radeon_uvd_resume()
245 memcpy(rdev->uvd.cpu_addr, rdev->uvd_fw->data, rdev->uvd_fw->size); radeon_uvd_resume()
247 size = radeon_bo_size(rdev->uvd.vcpu_bo); radeon_uvd_resume()
248 size -= rdev->uvd_fw->size; radeon_uvd_resume()
250 ptr = rdev->uvd.cpu_addr; radeon_uvd_resume()
251 ptr += rdev->uvd_fw->size; radeon_uvd_resume()
284 void radeon_uvd_free_handles(struct radeon_device *rdev, struct drm_file *filp) radeon_uvd_free_handles() argument
288 uint32_t handle = atomic_read(&rdev->uvd.handles[i]); radeon_uvd_free_handles()
289 if (handle != 0 && rdev->uvd.filp[i] == filp) { radeon_uvd_free_handles()
292 radeon_uvd_note_usage(rdev); radeon_uvd_free_handles()
294 r = radeon_uvd_get_destroy_msg(rdev, radeon_uvd_free_handles()
304 rdev->uvd.filp[i] = NULL; radeon_uvd_free_handles()
305 atomic_set(&rdev->uvd.handles[i], 0); radeon_uvd_free_handles()
410 if (p->rdev->family >= CHIP_PALM) radeon_uvd_validate_codec()
473 if (atomic_read(&p->rdev->uvd.handles[i]) == handle) { radeon_uvd_cs_msg()
478 if (!atomic_cmpxchg(&p->rdev->uvd.handles[i], 0, handle)) { radeon_uvd_cs_msg()
479 p->rdev->uvd.filp[i] = p->filp; radeon_uvd_cs_msg()
480 p->rdev->uvd.img_size[i] = img_size; radeon_uvd_cs_msg()
499 if (atomic_read(&p->rdev->uvd.handles[i]) == handle) { radeon_uvd_cs_msg()
500 if (p->rdev->uvd.filp[i] != p->filp) { radeon_uvd_cs_msg()
514 atomic_cmpxchg(&p->rdev->uvd.handles[i], handle, 0); radeon_uvd_cs_msg()
581 (start >> 28) != (p->rdev->uvd.gpu_addr >> 28)) { radeon_uvd_cs_reloc()
695 static int radeon_uvd_send_msg(struct radeon_device *rdev, radeon_uvd_send_msg() argument
702 r = radeon_ib_get(rdev, ring, &ib, NULL, 64); radeon_uvd_send_msg()
716 r = radeon_ib_schedule(rdev, &ib, NULL, false); radeon_uvd_send_msg()
721 radeon_ib_free(rdev, &ib); radeon_uvd_send_msg()
728 int radeon_uvd_get_create_msg(struct radeon_device *rdev, int ring, radeon_uvd_get_create_msg() argument
732 uint64_t offs = radeon_bo_size(rdev->uvd.vcpu_bo) - radeon_uvd_get_create_msg()
735 uint32_t *msg = rdev->uvd.cpu_addr + offs; radeon_uvd_get_create_msg()
736 uint64_t addr = rdev->uvd.gpu_addr + offs; radeon_uvd_get_create_msg()
740 r = radeon_bo_reserve(rdev->uvd.vcpu_bo, true); radeon_uvd_get_create_msg()
759 r = radeon_uvd_send_msg(rdev, ring, addr, fence); radeon_uvd_get_create_msg()
760 radeon_bo_unreserve(rdev->uvd.vcpu_bo); radeon_uvd_get_create_msg()
764 int radeon_uvd_get_destroy_msg(struct radeon_device *rdev, int ring, radeon_uvd_get_destroy_msg() argument
768 uint64_t offs = radeon_bo_size(rdev->uvd.vcpu_bo) - radeon_uvd_get_destroy_msg()
771 uint32_t *msg = rdev->uvd.cpu_addr + offs; radeon_uvd_get_destroy_msg()
772 uint64_t addr = rdev->uvd.gpu_addr + offs; radeon_uvd_get_destroy_msg()
776 r = radeon_bo_reserve(rdev->uvd.vcpu_bo, true); radeon_uvd_get_destroy_msg()
788 r = radeon_uvd_send_msg(rdev, ring, addr, fence); radeon_uvd_get_destroy_msg()
789 radeon_bo_unreserve(rdev->uvd.vcpu_bo); radeon_uvd_get_destroy_msg()
796 * @rdev: radeon_device pointer
802 static void radeon_uvd_count_handles(struct radeon_device *rdev, radeon_uvd_count_handles() argument
811 if (!atomic_read(&rdev->uvd.handles[i])) radeon_uvd_count_handles()
814 if (rdev->uvd.img_size[i] >= 720*576) radeon_uvd_count_handles()
823 struct radeon_device *rdev = radeon_uvd_idle_work_handler() local
826 if (radeon_fence_count_emitted(rdev, R600_RING_TYPE_UVD_INDEX) == 0) { radeon_uvd_idle_work_handler()
827 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { radeon_uvd_idle_work_handler()
828 radeon_uvd_count_handles(rdev, &rdev->pm.dpm.sd, radeon_uvd_idle_work_handler()
829 &rdev->pm.dpm.hd); radeon_uvd_idle_work_handler()
830 radeon_dpm_enable_uvd(rdev, false); radeon_uvd_idle_work_handler()
832 radeon_set_uvd_clocks(rdev, 0, 0); radeon_uvd_idle_work_handler()
835 schedule_delayed_work(&rdev->uvd.idle_work, radeon_uvd_idle_work_handler()
840 void radeon_uvd_note_usage(struct radeon_device *rdev) radeon_uvd_note_usage() argument
843 bool set_clocks = !cancel_delayed_work_sync(&rdev->uvd.idle_work); radeon_uvd_note_usage()
844 set_clocks &= schedule_delayed_work(&rdev->uvd.idle_work, radeon_uvd_note_usage()
847 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { radeon_uvd_note_usage()
849 radeon_uvd_count_handles(rdev, &sd, &hd); radeon_uvd_note_usage()
850 if ((rdev->pm.dpm.sd != sd) || radeon_uvd_note_usage()
851 (rdev->pm.dpm.hd != hd)) { radeon_uvd_note_usage()
852 rdev->pm.dpm.sd = sd; radeon_uvd_note_usage()
853 rdev->pm.dpm.hd = hd; radeon_uvd_note_usage()
860 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { radeon_uvd_note_usage()
861 radeon_dpm_enable_uvd(rdev, true); radeon_uvd_note_usage()
863 radeon_set_uvd_clocks(rdev, 53300, 40000); radeon_uvd_note_usage()
893 * @rdev: radeon_device pointer
910 int radeon_uvd_calc_upll_dividers(struct radeon_device *rdev, radeon_uvd_calc_upll_dividers() argument
920 unsigned vco_freq, ref_freq = rdev->clock.spll.reference_freq; radeon_uvd_calc_upll_dividers()
973 int radeon_uvd_send_upll_ctlreq(struct radeon_device *rdev, radeon_uvd_send_upll_ctlreq() argument
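
Both the UVD and VCE code track open sessions in a fixed array of atomic handles: lookups scan for a matching entry, and creation claims the first zero slot with a compare-and-swap, mirroring atomic_cmpxchg(&rdev->uvd.handles[i], 0, handle) above. A userspace model with C11 atomics; MAX_HANDLES is an assumed size for the sketch, not the driver's constant:

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

#define MAX_HANDLES 16                  /* assumed table size */

static _Atomic uint32_t handles[MAX_HANDLES];

/* Claim the first free slot for 'handle'; returns the slot or -1. */
static int handle_claim(uint32_t handle)
{
        int i;

        for (i = 0; i < MAX_HANDLES; i++) {
                uint32_t expected = 0;

                if (atomic_compare_exchange_strong(&handles[i],
                                                   &expected, handle))
                        return i;
        }
        return -1;
}

static void handle_release(int slot)
{
        atomic_store(&handles[slot], 0);
}

int main(void)
{
        int a = handle_claim(0x100);
        int b = handle_claim(0x200);

        printf("slots %d %d\n", a, b);              /* 0 1 */
        handle_release(a);
        printf("reused %d\n", handle_claim(0x300)); /* 0 again */
        return 0;
}
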
H A Duvd_v1_0.c
34 * @rdev: radeon_device pointer
39 uint32_t uvd_v1_0_get_rptr(struct radeon_device *rdev, uvd_v1_0_get_rptr() argument
48 * @rdev: radeon_device pointer
53 uint32_t uvd_v1_0_get_wptr(struct radeon_device *rdev, uvd_v1_0_get_wptr() argument
62 * @rdev: radeon_device pointer
67 void uvd_v1_0_set_wptr(struct radeon_device *rdev, uvd_v1_0_set_wptr() argument
76 * @rdev: radeon_device pointer
81 void uvd_v1_0_fence_emit(struct radeon_device *rdev, uvd_v1_0_fence_emit() argument
84 struct radeon_ring *ring = &rdev->ring[fence->ring]; uvd_v1_0_fence_emit()
85 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; uvd_v1_0_fence_emit()
106 * @rdev: radeon_device pointer
110 int uvd_v1_0_resume(struct radeon_device *rdev) uvd_v1_0_resume() argument
116 r = radeon_uvd_resume(rdev); uvd_v1_0_resume()
121 addr = (rdev->uvd.gpu_addr >> 3) + 16; uvd_v1_0_resume()
122 size = RADEON_GPU_PAGE_ALIGN(rdev->uvd_fw->size) >> 3; uvd_v1_0_resume()
137 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; uvd_v1_0_resume()
141 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; uvd_v1_0_resume()
144 WREG32(UVD_FW_START, *((uint32_t*)rdev->uvd.cpu_addr)); uvd_v1_0_resume()
152 * @rdev: radeon_device pointer
156 int uvd_v1_0_init(struct radeon_device *rdev) uvd_v1_0_init() argument
158 struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; uvd_v1_0_init()
163 if (rdev->family < CHIP_RV740) uvd_v1_0_init()
164 radeon_set_uvd_clocks(rdev, 10000, 10000); uvd_v1_0_init()
166 radeon_set_uvd_clocks(rdev, 53300, 40000); uvd_v1_0_init()
168 r = uvd_v1_0_start(rdev); uvd_v1_0_init()
173 r = radeon_ring_test(rdev, R600_RING_TYPE_UVD_INDEX, ring); uvd_v1_0_init()
179 r = radeon_ring_lock(rdev, ring, 10); uvd_v1_0_init()
204 radeon_ring_unlock_commit(rdev, ring, false); uvd_v1_0_init()
208 radeon_set_uvd_clocks(rdev, 0, 0); uvd_v1_0_init()
211 switch (rdev->family) { uvd_v1_0_init()
243 * @rdev: radeon_device pointer
247 void uvd_v1_0_fini(struct radeon_device *rdev) uvd_v1_0_fini() argument
249 struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; uvd_v1_0_fini()
251 uvd_v1_0_stop(rdev); uvd_v1_0_fini()
258 * @rdev: radeon_device pointer
262 int uvd_v1_0_start(struct radeon_device *rdev) uvd_v1_0_start() argument
264 struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; uvd_v1_0_start()
386 * @rdev: radeon_device pointer
390 void uvd_v1_0_stop(struct radeon_device *rdev) uvd_v1_0_stop() argument
415 * @rdev: radeon_device pointer
420 int uvd_v1_0_ring_test(struct radeon_device *rdev, struct radeon_ring *ring) uvd_v1_0_ring_test() argument
427 r = radeon_ring_lock(rdev, ring, 3); uvd_v1_0_ring_test()
435 radeon_ring_unlock_commit(rdev, ring, false); uvd_v1_0_ring_test()
436 for (i = 0; i < rdev->usec_timeout; i++) { uvd_v1_0_ring_test()
443 if (i < rdev->usec_timeout) { uvd_v1_0_ring_test()
457 * @rdev: radeon_device pointer
464 bool uvd_v1_0_semaphore_emit(struct radeon_device *rdev, uvd_v1_0_semaphore_emit() argument
476 * @rdev: radeon_device pointer
481 void uvd_v1_0_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) uvd_v1_0_ib_execute() argument
483 struct radeon_ring *ring = &rdev->ring[ib->ring]; uvd_v1_0_ib_execute()
494 * @rdev: radeon_device pointer
499 int uvd_v1_0_ib_test(struct radeon_device *rdev, struct radeon_ring *ring) uvd_v1_0_ib_test() argument
504 if (rdev->family < CHIP_RV740) uvd_v1_0_ib_test()
505 r = radeon_set_uvd_clocks(rdev, 10000, 10000); uvd_v1_0_ib_test()
507 r = radeon_set_uvd_clocks(rdev, 53300, 40000); uvd_v1_0_ib_test()
513 r = radeon_uvd_get_create_msg(rdev, ring->idx, 1, NULL); uvd_v1_0_ib_test()
519 r = radeon_uvd_get_destroy_msg(rdev, ring->idx, 1, &fence); uvd_v1_0_ib_test()
533 radeon_set_uvd_clocks(rdev, 0, 0); uvd_v1_0_ib_test()
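
uvd_v1_0_ring_test() is the standard radeon smoke test: push one packet that writes a recognizable value to a scratch location, then poll that location for up to rdev->usec_timeout iterations. A self-contained model with an in-memory "scratch register" and a fake executor that applies the write immediately rather than asynchronously on the GPU:

#include <stdint.h>
#include <stdio.h>

#define USEC_TIMEOUT 1000       /* stand-in for rdev->usec_timeout */

static uint32_t scratch;        /* models the scratch register */

/* Fake ring execution: the "packet" takes effect at once. */
static void ring_write_scratch(uint32_t val)
{
        scratch = val;
}

static int ring_test(void)
{
        const uint32_t magic = 0xCAFEDEAD;      /* recognizable pattern */
        int i;

        scratch = 0;
        ring_write_scratch(magic);
        for (i = 0; i < USEC_TIMEOUT; i++) {
                if (scratch == magic)
                        return 0;               /* ring alive */
        }
        return -1;                              /* ring hung */
}

int main(void)
{
        printf("ring test: %s\n", ring_test() ? "failed" : "passed");
        return 0;
}
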
H A Dr600_dma.c
29 u32 r600_gpu_check_soft_reset(struct radeon_device *rdev);
46 * @rdev: radeon_device pointer
51 uint32_t r600_dma_get_rptr(struct radeon_device *rdev, r600_dma_get_rptr() argument
56 if (rdev->wb.enabled) r600_dma_get_rptr()
57 rptr = rdev->wb.wb[ring->rptr_offs/4]; r600_dma_get_rptr()
67 * @rdev: radeon_device pointer
72 uint32_t r600_dma_get_wptr(struct radeon_device *rdev, r600_dma_get_wptr() argument
81 * @rdev: radeon_device pointer
86 void r600_dma_set_wptr(struct radeon_device *rdev, r600_dma_set_wptr() argument
95 * @rdev: radeon_device pointer
99 void r600_dma_stop(struct radeon_device *rdev) r600_dma_stop() argument
103 if (rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX) r600_dma_stop()
104 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size); r600_dma_stop()
109 rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false; r600_dma_stop()
115 * @rdev: radeon_device pointer
120 int r600_dma_resume(struct radeon_device *rdev) r600_dma_resume() argument
122 struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; r600_dma_resume()
144 upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF); r600_dma_resume()
146 ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC)); r600_dma_resume()
148 if (rdev->wb.enabled) r600_dma_resume()
164 if (rdev->family >= CHIP_RV770) r600_dma_resume()
174 r = radeon_ring_test(rdev, R600_RING_TYPE_DMA_INDEX, ring); r600_dma_resume()
180 if (rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX) r600_dma_resume()
181 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size); r600_dma_resume()
189 * @rdev: radeon_device pointer
193 void r600_dma_fini(struct radeon_device *rdev) r600_dma_fini() argument
195 r600_dma_stop(rdev); r600_dma_fini()
196 radeon_ring_fini(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX]); r600_dma_fini()
202 * @rdev: radeon_device pointer
208 bool r600_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) r600_dma_is_lockup() argument
210 u32 reset_mask = r600_gpu_check_soft_reset(rdev); r600_dma_is_lockup()
213 radeon_ring_lockup_update(rdev, ring); r600_dma_is_lockup()
216 return radeon_ring_test_lockup(rdev, ring); r600_dma_is_lockup()
223 * @rdev: radeon_device pointer
230 int r600_dma_ring_test(struct radeon_device *rdev, r600_dma_ring_test() argument
244 gpu_addr = rdev->wb.gpu_addr + index; r600_dma_ring_test()
247 rdev->wb.wb[index/4] = cpu_to_le32(tmp); r600_dma_ring_test()
249 r = radeon_ring_lock(rdev, ring, 4); r600_dma_ring_test()
258 radeon_ring_unlock_commit(rdev, ring, false); r600_dma_ring_test()
260 for (i = 0; i < rdev->usec_timeout; i++) { r600_dma_ring_test()
261 tmp = le32_to_cpu(rdev->wb.wb[index/4]); r600_dma_ring_test()
267 if (i < rdev->usec_timeout) { r600_dma_ring_test()
280 * @rdev: radeon_device pointer
287 void r600_dma_fence_ring_emit(struct radeon_device *rdev, r600_dma_fence_ring_emit() argument
290 struct radeon_ring *ring = &rdev->ring[fence->ring]; r600_dma_fence_ring_emit()
291 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; r600_dma_fence_ring_emit()
305 * @rdev: radeon_device pointer
313 bool r600_dma_semaphore_ring_emit(struct radeon_device *rdev, r600_dma_semaphore_ring_emit() argument
331 * @rdev: radeon_device pointer
337 int r600_dma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring) r600_dma_ib_test() argument
351 gpu_addr = rdev->wb.gpu_addr + index; r600_dma_ib_test()
353 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256); r600_dma_ib_test()
365 r = radeon_ib_schedule(rdev, &ib, NULL, false); r600_dma_ib_test()
367 radeon_ib_free(rdev, &ib); r600_dma_ib_test()
376 for (i = 0; i < rdev->usec_timeout; i++) { r600_dma_ib_test()
377 tmp = le32_to_cpu(rdev->wb.wb[index/4]); r600_dma_ib_test()
382 if (i < rdev->usec_timeout) { r600_dma_ib_test()
388 radeon_ib_free(rdev, &ib); r600_dma_ib_test()
395 * @rdev: radeon_device pointer
400 void r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) r600_dma_ring_ib_execute() argument
402 struct radeon_ring *ring = &rdev->ring[ib->ring]; r600_dma_ring_ib_execute()
404 if (rdev->wb.enabled) { r600_dma_ring_ib_execute()
429 * @rdev: radeon_device pointer
439 struct radeon_fence *r600_copy_dma(struct radeon_device *rdev, r600_copy_dma() argument
446 int ring_index = rdev->asic->copy.dma_ring_index; r600_copy_dma()
447 struct radeon_ring *ring = &rdev->ring[ring_index]; r600_copy_dma()
456 r = radeon_ring_lock(rdev, ring, num_loops * 4 + 8); r600_copy_dma()
459 radeon_sync_free(rdev, &sync, NULL); r600_copy_dma()
463 radeon_sync_resv(rdev, &sync, resv, false); r600_copy_dma()
464 radeon_sync_rings(rdev, &sync, ring->idx); r600_copy_dma()
480 r = radeon_fence_emit(rdev, &fence, ring->idx); r600_copy_dma()
482 radeon_ring_unlock_undo(rdev, ring); r600_copy_dma()
483 radeon_sync_free(rdev, &sync, NULL); r600_copy_dma()
487 radeon_ring_unlock_commit(rdev, ring, false); r600_copy_dma()
488 radeon_sync_free(rdev, &sync, fence); r600_copy_dma()
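r600_copy_dma() above is the canonical async-copy shape: size the ring reservation from the number of copy packets, pull in the buffer's reservation-object fences, emit the packets and a fence, and on any failure undo the ring lock instead of committing. A condensed sketch of that control flow; the 0xFFFE per-packet dword limit is taken on trust from the r600 code, and the actual DMA_PACKET encoding is elided:

struct radeon_fence *example_dma_copy(struct radeon_device *rdev,
				      uint64_t src_offset, uint64_t dst_offset,
				      unsigned num_gpu_pages,
				      struct reservation_object *resv)
{
	struct radeon_fence *fence;
	struct radeon_sync sync;
	int ring_index = rdev->asic->copy.dma_ring_index;
	struct radeon_ring *ring = &rdev->ring[ring_index];
	unsigned size_in_dw = num_gpu_pages * (RADEON_GPU_PAGE_SIZE / 4);
	unsigned num_loops = DIV_ROUND_UP(size_in_dw, 0xFFFE);
	int r;

	radeon_sync_create(&sync);

	/* 4 dwords per copy packet plus 8 for the fence, as in the hit above. */
	r = radeon_ring_lock(rdev, ring, num_loops * 4 + 8);
	if (r) {
		radeon_sync_free(rdev, &sync, NULL);
		return ERR_PTR(r);
	}

	/* Wait for everyone else holding fences on the buffers before copying. */
	radeon_sync_resv(rdev, &sync, resv, false);
	radeon_sync_rings(rdev, &sync, ring->idx);

	/* ... emit num_loops DMA copy packets covering src_offset/dst_offset ... */

	r = radeon_fence_emit(rdev, &fence, ring->idx);
	if (r) {
		radeon_ring_unlock_undo(rdev, ring);	/* roll wptr back, drop lock */
		radeon_sync_free(rdev, &sync, NULL);
		return ERR_PTR(r);
	}

	radeon_ring_unlock_commit(rdev, ring, false);
	radeon_sync_free(rdev, &sync, fence);
	return fence;
}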
H A Dradeon_ring.c45 static int radeon_debugfs_ring_init(struct radeon_device *rdev, struct radeon_ring *ring);
51 * @rdev: radeon_device pointer
57 bool radeon_ring_supports_scratch_reg(struct radeon_device *rdev, radeon_ring_supports_scratch_reg() argument
73 * @rdev: radeon_device pointer
78 void radeon_ring_free_size(struct radeon_device *rdev, struct radeon_ring *ring) radeon_ring_free_size() argument
80 uint32_t rptr = radeon_ring_get_rptr(rdev, ring); radeon_ring_free_size()
90 radeon_ring_lockup_update(rdev, ring); radeon_ring_free_size()
97 * @rdev: radeon_device pointer
104 int radeon_ring_alloc(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw) radeon_ring_alloc() argument
113 radeon_ring_free_size(rdev, ring); radeon_ring_alloc()
116 radeon_ring_free_size(rdev, ring); radeon_ring_alloc()
120 r = radeon_fence_wait_next(rdev, ring->idx); radeon_ring_alloc()
132 * @rdev: radeon_device pointer
140 int radeon_ring_lock(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw) radeon_ring_lock() argument
144 mutex_lock(&rdev->ring_lock); radeon_ring_lock()
145 r = radeon_ring_alloc(rdev, ring, ndw); radeon_ring_lock()
147 mutex_unlock(&rdev->ring_lock); radeon_ring_lock()
157 * @rdev: radeon_device pointer
164 void radeon_ring_commit(struct radeon_device *rdev, struct radeon_ring *ring, radeon_ring_commit() argument
170 if (hdp_flush && rdev->asic->ring[ring->idx]->hdp_flush) radeon_ring_commit()
171 rdev->asic->ring[ring->idx]->hdp_flush(rdev, ring); radeon_ring_commit()
180 if (hdp_flush && rdev->asic->mmio_hdp_flush) radeon_ring_commit()
181 rdev->asic->mmio_hdp_flush(rdev); radeon_ring_commit()
182 radeon_ring_set_wptr(rdev, ring); radeon_ring_commit()
189 * @rdev: radeon_device pointer
195 void radeon_ring_unlock_commit(struct radeon_device *rdev, struct radeon_ring *ring, radeon_ring_unlock_commit() argument
198 radeon_ring_commit(rdev, ring, hdp_flush); radeon_ring_unlock_commit()
199 mutex_unlock(&rdev->ring_lock); radeon_ring_unlock_commit()
221 void radeon_ring_unlock_undo(struct radeon_device *rdev, struct radeon_ring *ring) radeon_ring_unlock_undo() argument
224 mutex_unlock(&rdev->ring_lock); radeon_ring_unlock_undo()
234 void radeon_ring_lockup_update(struct radeon_device *rdev, radeon_ring_lockup_update() argument
237 atomic_set(&ring->last_rptr, radeon_ring_get_rptr(rdev, ring)); radeon_ring_lockup_update()
243 * @rdev: radeon device structure
247 bool radeon_ring_test_lockup(struct radeon_device *rdev, struct radeon_ring *ring) radeon_ring_test_lockup() argument
249 uint32_t rptr = radeon_ring_get_rptr(rdev, ring); radeon_ring_test_lockup()
255 radeon_ring_lockup_update(rdev, ring); radeon_ring_test_lockup()
261 dev_err(rdev->dev, "ring %d stalled for more than %llumsec\n", radeon_ring_test_lockup()
272 * @rdev: radeon_device pointer
277 unsigned radeon_ring_backup(struct radeon_device *rdev, struct radeon_ring *ring, radeon_ring_backup() argument
283 mutex_lock(&rdev->ring_lock); radeon_ring_backup()
287 mutex_unlock(&rdev->ring_lock); radeon_ring_backup()
292 if (!radeon_fence_count_emitted(rdev, ring->idx)) { radeon_ring_backup()
293 mutex_unlock(&rdev->ring_lock); radeon_ring_backup()
300 else if (rdev->wb.enabled) radeon_ring_backup()
304 mutex_unlock(&rdev->ring_lock); radeon_ring_backup()
312 mutex_unlock(&rdev->ring_lock); radeon_ring_backup()
319 mutex_unlock(&rdev->ring_lock); radeon_ring_backup()
327 mutex_unlock(&rdev->ring_lock); radeon_ring_backup()
334 * @rdev: radeon_device pointer
341 int radeon_ring_restore(struct radeon_device *rdev, struct radeon_ring *ring, radeon_ring_restore() argument
350 r = radeon_ring_lock(rdev, ring, size); radeon_ring_restore()
358 radeon_ring_unlock_commit(rdev, ring, false); radeon_ring_restore()
366 * @rdev: radeon_device pointer
375 int radeon_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size, radeon_ring_init() argument
385 r = radeon_bo_create(rdev, ring->ring_size, PAGE_SIZE, true, radeon_ring_init()
389 dev_err(rdev->dev, "(%d) ring create failed\n", r); radeon_ring_init()
399 dev_err(rdev->dev, "(%d) ring pin failed\n", r); radeon_ring_init()
406 dev_err(rdev->dev, "(%d) ring map failed\n", r); radeon_ring_init()
412 if (rdev->wb.enabled) { radeon_ring_init()
414 ring->next_rptr_gpu_addr = rdev->wb.gpu_addr + index; radeon_ring_init()
415 ring->next_rptr_cpu_addr = &rdev->wb.wb[index/4]; radeon_ring_init()
417 if (radeon_debugfs_ring_init(rdev, ring)) { radeon_ring_init()
420 radeon_ring_lockup_update(rdev, ring); radeon_ring_init()
427 * @rdev: radeon_device pointer
432 void radeon_ring_fini(struct radeon_device *rdev, struct radeon_ring *ring) radeon_ring_fini() argument
437 mutex_lock(&rdev->ring_lock); radeon_ring_fini()
442 mutex_unlock(&rdev->ring_lock); radeon_ring_fini()
464 struct radeon_device *rdev = dev->dev_private; radeon_debugfs_ring_info() local
466 struct radeon_ring *ring = &rdev->ring[ridx]; radeon_debugfs_ring_info()
471 radeon_ring_free_size(rdev, ring); radeon_debugfs_ring_info()
474 wptr = radeon_ring_get_wptr(rdev, ring); radeon_debugfs_ring_info()
478 rptr = radeon_ring_get_rptr(rdev, ring); radeon_debugfs_ring_info()
539 static int radeon_debugfs_ring_init(struct radeon_device *rdev, struct radeon_ring *ring) radeon_debugfs_ring_init() argument
548 if (&rdev->ring[ridx] != ring) radeon_debugfs_ring_init()
551 r = radeon_debugfs_add_files(rdev, info, 1); radeon_debugfs_ring_init()
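Taken together, the radeon_ring.c hits spell out the locking contract every caller above follows: radeon_ring_lock() takes rdev->ring_lock and reserves ndw dwords (waiting on fences via radeon_ring_alloc() if the ring is full), and each successful lock must end in exactly one of radeon_ring_unlock_commit() or radeon_ring_unlock_undo(). A minimal caller sketch under that contract; the nop-fill payload is just a stand-in for real packets:

int example_emit_packets(struct radeon_device *rdev, struct radeon_ring *ring)
{
	unsigned i;
	int r;

	/* Reserve 64 dwords; this blocks on earlier fences if the ring is full. */
	r = radeon_ring_lock(rdev, ring, 64);
	if (r)
		return r;	/* lock failed, nothing to undo */

	for (i = 0; i < 64; i++)
		radeon_ring_write(ring, ring->nop);	/* filler instead of real packets */

	/* Success: publish the new wptr and drop rdev->ring_lock. */
	radeon_ring_unlock_commit(rdev, ring, false);
	return 0;
}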
H A Devergreen_dma.c29 u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev);
34 * @rdev: radeon_device pointer
41 void evergreen_dma_fence_ring_emit(struct radeon_device *rdev, evergreen_dma_fence_ring_emit() argument
44 struct radeon_ring *ring = &rdev->ring[fence->ring]; evergreen_dma_fence_ring_emit()
45 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; evergreen_dma_fence_ring_emit()
62 * @rdev: radeon_device pointer
67 void evergreen_dma_ring_ib_execute(struct radeon_device *rdev, evergreen_dma_ring_ib_execute() argument
70 struct radeon_ring *ring = &rdev->ring[ib->ring]; evergreen_dma_ring_ib_execute()
72 if (rdev->wb.enabled) { evergreen_dma_ring_ib_execute()
97 * @rdev: radeon_device pointer
107 struct radeon_fence *evergreen_copy_dma(struct radeon_device *rdev, evergreen_copy_dma() argument
115 int ring_index = rdev->asic->copy.dma_ring_index; evergreen_copy_dma()
116 struct radeon_ring *ring = &rdev->ring[ring_index]; evergreen_copy_dma()
125 r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11); evergreen_copy_dma()
128 radeon_sync_free(rdev, &sync, NULL); evergreen_copy_dma()
132 radeon_sync_resv(rdev, &sync, resv, false); evergreen_copy_dma()
133 radeon_sync_rings(rdev, &sync, ring->idx); evergreen_copy_dma()
149 r = radeon_fence_emit(rdev, &fence, ring->idx); evergreen_copy_dma()
151 radeon_ring_unlock_undo(rdev, ring); evergreen_copy_dma()
152 radeon_sync_free(rdev, &sync, NULL); evergreen_copy_dma()
156 radeon_ring_unlock_commit(rdev, ring, false); evergreen_copy_dma()
157 radeon_sync_free(rdev, &sync, fence); evergreen_copy_dma()
165 * @rdev: radeon_device pointer
171 bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) evergreen_dma_is_lockup() argument
173 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev); evergreen_dma_is_lockup()
176 radeon_ring_lockup_update(rdev, ring); evergreen_dma_is_lockup()
179 return radeon_ring_test_lockup(rdev, ring); evergreen_dma_is_lockup()
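evergreen_dma_is_lockup() above (like its r600 twin earlier) is the shared lockup-detection idiom: ask the soft-reset logic which engines look hung; if the DMA engine is not flagged, refresh the last-seen rptr so the stall timer restarts, otherwise fall through to the rptr-progress check. An annotated restatement, assuming the RADEON_RESET_DMA mask bit these functions test:

bool example_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
{
	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);

	if (!(reset_mask & RADEON_RESET_DMA)) {
		/* Engine looks alive: remember the current rptr, no lockup. */
		radeon_ring_lockup_update(rdev, ring);
		return false;
	}
	/* Engine flagged as hung: has rptr made progress within the timeout? */
	return radeon_ring_test_lockup(rdev, ring);
}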
H A Dradeon_test.c34 static void radeon_do_test_moves(struct radeon_device *rdev, int flag) radeon_do_test_moves() argument
44 ring = radeon_copy_dma_ring_index(rdev); radeon_do_test_moves()
47 ring = radeon_copy_blit_ring_index(rdev); radeon_do_test_moves()
59 n = rdev->mc.gtt_size - rdev->gart_pin_size; radeon_do_test_moves()
69 r = radeon_bo_create(rdev, size, PAGE_SIZE, true, RADEON_GEM_DOMAIN_VRAM, radeon_do_test_moves()
89 r = radeon_bo_create(rdev, size, PAGE_SIZE, true, radeon_do_test_moves()
120 fence = radeon_copy_dma(rdev, gtt_addr, vram_addr, radeon_do_test_moves()
124 fence = radeon_copy_blit(rdev, gtt_addr, vram_addr, radeon_do_test_moves()
157 (gtt_addr - rdev->mc.gtt_start + radeon_do_test_moves()
160 (vram_addr - rdev->mc.vram_start + radeon_do_test_moves()
171 fence = radeon_copy_dma(rdev, vram_addr, gtt_addr, radeon_do_test_moves()
175 fence = radeon_copy_blit(rdev, vram_addr, gtt_addr, radeon_do_test_moves()
208 (vram_addr - rdev->mc.vram_start + radeon_do_test_moves()
211 (gtt_addr - rdev->mc.gtt_start + radeon_do_test_moves()
221 gtt_addr - rdev->mc.gtt_start); radeon_do_test_moves()
253 void radeon_test_moves(struct radeon_device *rdev) radeon_test_moves() argument
255 if (rdev->asic->copy.dma) radeon_test_moves()
256 radeon_do_test_moves(rdev, RADEON_TEST_COPY_DMA); radeon_test_moves()
257 if (rdev->asic->copy.blit) radeon_test_moves()
258 radeon_do_test_moves(rdev, RADEON_TEST_COPY_BLIT); radeon_test_moves()
261 static int radeon_test_create_and_emit_fence(struct radeon_device *rdev, radeon_test_create_and_emit_fence() argument
269 r = radeon_uvd_get_create_msg(rdev, ring->idx, handle, NULL); radeon_test_create_and_emit_fence()
275 r = radeon_uvd_get_destroy_msg(rdev, ring->idx, handle, fence); radeon_test_create_and_emit_fence()
283 r = radeon_vce_get_create_msg(rdev, ring->idx, handle, NULL); radeon_test_create_and_emit_fence()
289 r = radeon_vce_get_destroy_msg(rdev, ring->idx, handle, fence); radeon_test_create_and_emit_fence()
296 r = radeon_ring_lock(rdev, ring, 64); radeon_test_create_and_emit_fence()
301 radeon_fence_emit(rdev, fence, ring->idx); radeon_test_create_and_emit_fence()
302 radeon_ring_unlock_commit(rdev, ring, false); radeon_test_create_and_emit_fence()
307 void radeon_test_ring_sync(struct radeon_device *rdev, radeon_test_ring_sync() argument
315 r = radeon_semaphore_create(rdev, &semaphore); radeon_test_ring_sync()
321 r = radeon_ring_lock(rdev, ringA, 64); radeon_test_ring_sync()
326 radeon_semaphore_emit_wait(rdev, ringA->idx, semaphore); radeon_test_ring_sync()
327 radeon_ring_unlock_commit(rdev, ringA, false); radeon_test_ring_sync()
329 r = radeon_test_create_and_emit_fence(rdev, ringA, &fence1); radeon_test_ring_sync()
333 r = radeon_ring_lock(rdev, ringA, 64); radeon_test_ring_sync()
338 radeon_semaphore_emit_wait(rdev, ringA->idx, semaphore); radeon_test_ring_sync()
339 radeon_ring_unlock_commit(rdev, ringA, false); radeon_test_ring_sync()
341 r = radeon_test_create_and_emit_fence(rdev, ringA, &fence2); radeon_test_ring_sync()
352 r = radeon_ring_lock(rdev, ringB, 64); radeon_test_ring_sync()
357 radeon_semaphore_emit_signal(rdev, ringB->idx, semaphore); radeon_test_ring_sync()
358 radeon_ring_unlock_commit(rdev, ringB, false); radeon_test_ring_sync()
373 r = radeon_ring_lock(rdev, ringB, 64); radeon_test_ring_sync()
378 radeon_semaphore_emit_signal(rdev, ringB->idx, semaphore); radeon_test_ring_sync()
379 radeon_ring_unlock_commit(rdev, ringB, false); radeon_test_ring_sync()
388 radeon_semaphore_free(rdev, &semaphore, NULL); radeon_test_ring_sync()
400 static void radeon_test_ring_sync2(struct radeon_device *rdev, radeon_test_ring_sync2() argument
410 r = radeon_semaphore_create(rdev, &semaphore); radeon_test_ring_sync2()
416 r = radeon_ring_lock(rdev, ringA, 64); radeon_test_ring_sync2()
421 radeon_semaphore_emit_wait(rdev, ringA->idx, semaphore); radeon_test_ring_sync2()
422 radeon_ring_unlock_commit(rdev, ringA, false); radeon_test_ring_sync2()
424 r = radeon_test_create_and_emit_fence(rdev, ringA, &fenceA); radeon_test_ring_sync2()
428 r = radeon_ring_lock(rdev, ringB, 64); radeon_test_ring_sync2()
433 radeon_semaphore_emit_wait(rdev, ringB->idx, semaphore); radeon_test_ring_sync2()
434 radeon_ring_unlock_commit(rdev, ringB, false); radeon_test_ring_sync2()
435 r = radeon_test_create_and_emit_fence(rdev, ringB, &fenceB); radeon_test_ring_sync2()
450 r = radeon_ring_lock(rdev, ringC, 64); radeon_test_ring_sync2()
455 radeon_semaphore_emit_signal(rdev, ringC->idx, semaphore); radeon_test_ring_sync2()
456 radeon_ring_unlock_commit(rdev, ringC, false); radeon_test_ring_sync2()
476 r = radeon_ring_lock(rdev, ringC, 64); radeon_test_ring_sync2()
481 radeon_semaphore_emit_signal(rdev, ringC->idx, semaphore); radeon_test_ring_sync2()
482 radeon_ring_unlock_commit(rdev, ringC, false); radeon_test_ring_sync2()
498 radeon_semaphore_free(rdev, &semaphore, NULL); radeon_test_ring_sync2()
520 void radeon_test_syncing(struct radeon_device *rdev) radeon_test_syncing() argument
525 struct radeon_ring *ringA = &rdev->ring[i]; radeon_test_syncing()
530 struct radeon_ring *ringB = &rdev->ring[j]; radeon_test_syncing()
538 radeon_test_ring_sync(rdev, ringA, ringB); radeon_test_syncing()
541 radeon_test_ring_sync(rdev, ringB, ringA); radeon_test_syncing()
544 struct radeon_ring *ringC = &rdev->ring[k]; radeon_test_syncing()
555 radeon_test_ring_sync2(rdev, ringA, ringB, ringC); radeon_test_syncing()
558 radeon_test_ring_sync2(rdev, ringA, ringC, ringB); radeon_test_syncing()
561 radeon_test_ring_sync2(rdev, ringB, ringA, ringC); radeon_test_syncing()
564 radeon_test_ring_sync2(rdev, ringB, ringC, ringA); radeon_test_syncing()
567 radeon_test_ring_sync2(rdev, ringC, ringA, ringB); radeon_test_syncing()
570 radeon_test_ring_sync2(rdev, ringC, ringB, ringA); radeon_test_syncing()
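radeon_test_syncing() above runs radeon_test_ring_sync()/radeon_test_ring_sync2() over every ordered pair and triple of ready rings. The core of the two-ring test: park ring A on a semaphore wait, fence behind the wait, then signal from ring B and verify the fence completes only after the signal. One wait/signal round, condensed into a sketch (the not-yet-signaled assertions of the real test are elided):

static int example_semaphore_round(struct radeon_device *rdev,
				   struct radeon_ring *ringA,
				   struct radeon_ring *ringB)
{
	struct radeon_semaphore *semaphore = NULL;
	struct radeon_fence *fence = NULL;
	int r;

	r = radeon_semaphore_create(rdev, &semaphore);
	if (r)
		return r;

	/* Park ring A on the semaphore and emit a fence behind the wait. */
	r = radeon_ring_lock(rdev, ringA, 64);
	if (r)
		goto out;
	radeon_semaphore_emit_wait(rdev, ringA->idx, semaphore);
	r = radeon_fence_emit(rdev, &fence, ringA->idx);
	if (r) {
		radeon_ring_unlock_undo(rdev, ringA);
		goto out;
	}
	radeon_ring_unlock_commit(rdev, ringA, false);

	msleep(100);	/* here the real test asserts the fence has NOT signaled */

	/* Signal from ring B; ring A's wait, then its fence, can now retire. */
	r = radeon_ring_lock(rdev, ringB, 64);
	if (r)
		goto out;
	radeon_semaphore_emit_signal(rdev, ringB->idx, semaphore);
	radeon_ring_unlock_commit(rdev, ringB, false);

	r = radeon_fence_wait(fence, false);
out:
	radeon_semaphore_free(rdev, &semaphore, fence);
	radeon_fence_unref(&fence);
	return r;
}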
H A Dr100.c71 static bool r100_is_in_vblank(struct radeon_device *rdev, int crtc) r100_is_in_vblank() argument
86 static bool r100_is_counter_moving(struct radeon_device *rdev, int crtc) r100_is_counter_moving() argument
106 * @rdev: radeon_device pointer
111 void r100_wait_for_vblank(struct radeon_device *rdev, int crtc) r100_wait_for_vblank() argument
115 if (crtc >= rdev->num_crtc) r100_wait_for_vblank()
129 while (r100_is_in_vblank(rdev, crtc)) { r100_wait_for_vblank()
131 if (!r100_is_counter_moving(rdev, crtc)) r100_wait_for_vblank()
136 while (!r100_is_in_vblank(rdev, crtc)) { r100_wait_for_vblank()
138 if (!r100_is_counter_moving(rdev, crtc)) r100_wait_for_vblank()
147 * @rdev: radeon_device pointer
156 void r100_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base) r100_page_flip() argument
158 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id]; r100_page_flip()
167 for (i = 0; i < rdev->usec_timeout; i++) { r100_page_flip()
183 * @rdev: radeon_device pointer
189 bool r100_page_flip_pending(struct radeon_device *rdev, int crtc_id) r100_page_flip_pending() argument
191 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id]; r100_page_flip_pending()
201 * @rdev: radeon_device pointer
207 void r100_pm_get_dynpm_state(struct radeon_device *rdev) r100_pm_get_dynpm_state() argument
210 rdev->pm.dynpm_can_upclock = true; r100_pm_get_dynpm_state()
211 rdev->pm.dynpm_can_downclock = true; r100_pm_get_dynpm_state()
213 switch (rdev->pm.dynpm_planned_action) { r100_pm_get_dynpm_state()
215 rdev->pm.requested_power_state_index = 0; r100_pm_get_dynpm_state()
216 rdev->pm.dynpm_can_downclock = false; r100_pm_get_dynpm_state()
219 if (rdev->pm.current_power_state_index == 0) { r100_pm_get_dynpm_state()
220 rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index; r100_pm_get_dynpm_state()
221 rdev->pm.dynpm_can_downclock = false; r100_pm_get_dynpm_state()
223 if (rdev->pm.active_crtc_count > 1) { r100_pm_get_dynpm_state()
224 for (i = 0; i < rdev->pm.num_power_states; i++) { r100_pm_get_dynpm_state()
225 if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY) r100_pm_get_dynpm_state()
227 else if (i >= rdev->pm.current_power_state_index) { r100_pm_get_dynpm_state()
228 rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index; r100_pm_get_dynpm_state()
231 rdev->pm.requested_power_state_index = i; r100_pm_get_dynpm_state()
236 rdev->pm.requested_power_state_index = r100_pm_get_dynpm_state()
237 rdev->pm.current_power_state_index - 1; r100_pm_get_dynpm_state()
240 if ((rdev->pm.active_crtc_count > 0) && r100_pm_get_dynpm_state()
241 (rdev->pm.power_state[rdev->pm.requested_power_state_index].clock_info[0].flags & r100_pm_get_dynpm_state()
243 rdev->pm.requested_power_state_index++; r100_pm_get_dynpm_state()
247 if (rdev->pm.current_power_state_index == (rdev->pm.num_power_states - 1)) { r100_pm_get_dynpm_state()
248 rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index; r100_pm_get_dynpm_state()
249 rdev->pm.dynpm_can_upclock = false; r100_pm_get_dynpm_state()
251 if (rdev->pm.active_crtc_count > 1) { r100_pm_get_dynpm_state()
252 for (i = (rdev->pm.num_power_states - 1); i >= 0; i--) { r100_pm_get_dynpm_state()
253 if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY) r100_pm_get_dynpm_state()
255 else if (i <= rdev->pm.current_power_state_index) { r100_pm_get_dynpm_state()
256 rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index; r100_pm_get_dynpm_state()
259 rdev->pm.requested_power_state_index = i; r100_pm_get_dynpm_state()
264 rdev->pm.requested_power_state_index = r100_pm_get_dynpm_state()
265 rdev->pm.current_power_state_index + 1; r100_pm_get_dynpm_state()
269 rdev->pm.requested_power_state_index = rdev->pm.default_power_state_index; r100_pm_get_dynpm_state()
270 rdev->pm.dynpm_can_upclock = false; r100_pm_get_dynpm_state()
278 rdev->pm.requested_clock_mode_index = 0; r100_pm_get_dynpm_state()
281 rdev->pm.power_state[rdev->pm.requested_power_state_index]. r100_pm_get_dynpm_state()
282 clock_info[rdev->pm.requested_clock_mode_index].sclk, r100_pm_get_dynpm_state()
283 rdev->pm.power_state[rdev->pm.requested_power_state_index]. r100_pm_get_dynpm_state()
284 clock_info[rdev->pm.requested_clock_mode_index].mclk, r100_pm_get_dynpm_state()
285 rdev->pm.power_state[rdev->pm.requested_power_state_index]. r100_pm_get_dynpm_state()
292 * @rdev: radeon_device pointer
298 void r100_pm_init_profile(struct radeon_device *rdev) r100_pm_init_profile() argument
301 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; r100_pm_init_profile()
302 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r100_pm_init_profile()
303 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0; r100_pm_init_profile()
304 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0; r100_pm_init_profile()
306 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 0; r100_pm_init_profile()
307 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0; r100_pm_init_profile()
308 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; r100_pm_init_profile()
309 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; r100_pm_init_profile()
311 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 0; r100_pm_init_profile()
312 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 0; r100_pm_init_profile()
313 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0; r100_pm_init_profile()
314 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0; r100_pm_init_profile()
316 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 0; r100_pm_init_profile()
317 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r100_pm_init_profile()
318 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0; r100_pm_init_profile()
319 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0; r100_pm_init_profile()
321 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 0; r100_pm_init_profile()
322 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r100_pm_init_profile()
323 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; r100_pm_init_profile()
324 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; r100_pm_init_profile()
326 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 0; r100_pm_init_profile()
327 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r100_pm_init_profile()
328 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0; r100_pm_init_profile()
329 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0; r100_pm_init_profile()
331 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 0; r100_pm_init_profile()
332 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; r100_pm_init_profile()
333 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0; r100_pm_init_profile()
334 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0; r100_pm_init_profile()
340 * @rdev: radeon_device pointer
345 void r100_pm_misc(struct radeon_device *rdev) r100_pm_misc() argument
347 int requested_index = rdev->pm.requested_power_state_index; r100_pm_misc()
348 struct radeon_power_state *ps = &rdev->pm.power_state[requested_index]; r100_pm_misc()
425 if ((rdev->flags & RADEON_IS_PCIE) && r100_pm_misc()
426 !(rdev->flags & RADEON_IS_IGP) && r100_pm_misc()
427 rdev->asic->pm.set_pcie_lanes && r100_pm_misc()
429 rdev->pm.power_state[rdev->pm.current_power_state_index].pcie_lanes)) { r100_pm_misc()
430 radeon_set_pcie_lanes(rdev, r100_pm_misc()
439 * @rdev: radeon_device pointer
443 void r100_pm_prepare(struct radeon_device *rdev) r100_pm_prepare() argument
445 struct drm_device *ddev = rdev->ddev; r100_pm_prepare()
470 * @rdev: radeon_device pointer
474 void r100_pm_finish(struct radeon_device *rdev) r100_pm_finish() argument
476 struct drm_device *ddev = rdev->ddev; r100_pm_finish()
501 * @rdev: radeon_device pointer
506 bool r100_gui_idle(struct radeon_device *rdev) r100_gui_idle() argument
518 * @rdev: radeon_device pointer
524 bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd) r100_hpd_sense() argument
546 * @rdev: radeon_device pointer
551 void r100_hpd_set_polarity(struct radeon_device *rdev, r100_hpd_set_polarity() argument
555 bool connected = r100_hpd_sense(rdev, hpd); r100_hpd_set_polarity()
582 * @rdev: radeon_device pointer
587 void r100_hpd_init(struct radeon_device *rdev) r100_hpd_init() argument
589 struct drm_device *dev = rdev->ddev; r100_hpd_init()
596 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd); r100_hpd_init()
598 radeon_irq_kms_enable_hpd(rdev, enable); r100_hpd_init()
604 * @rdev: radeon_device pointer
609 void r100_hpd_fini(struct radeon_device *rdev) r100_hpd_fini() argument
611 struct drm_device *dev = rdev->ddev; r100_hpd_fini()
619 radeon_irq_kms_disable_hpd(rdev, disable); r100_hpd_fini()
625 void r100_pci_gart_tlb_flush(struct radeon_device *rdev) r100_pci_gart_tlb_flush() argument
633 int r100_pci_gart_init(struct radeon_device *rdev) r100_pci_gart_init() argument
637 if (rdev->gart.ptr) { r100_pci_gart_init()
642 r = radeon_gart_init(rdev); r100_pci_gart_init()
645 rdev->gart.table_size = rdev->gart.num_gpu_pages * 4; r100_pci_gart_init()
646 rdev->asic->gart.tlb_flush = &r100_pci_gart_tlb_flush; r100_pci_gart_init()
647 rdev->asic->gart.get_page_entry = &r100_pci_gart_get_page_entry; r100_pci_gart_init()
648 rdev->asic->gart.set_page = &r100_pci_gart_set_page; r100_pci_gart_init()
649 return radeon_gart_table_ram_alloc(rdev); r100_pci_gart_init()
652 int r100_pci_gart_enable(struct radeon_device *rdev) r100_pci_gart_enable() argument
660 WREG32(RADEON_AIC_LO_ADDR, rdev->mc.gtt_start); r100_pci_gart_enable()
661 WREG32(RADEON_AIC_HI_ADDR, rdev->mc.gtt_end); r100_pci_gart_enable()
663 WREG32(RADEON_AIC_PT_BASE, rdev->gart.table_addr); r100_pci_gart_enable()
666 r100_pci_gart_tlb_flush(rdev); r100_pci_gart_enable()
668 (unsigned)(rdev->mc.gtt_size >> 20), r100_pci_gart_enable()
669 (unsigned long long)rdev->gart.table_addr); r100_pci_gart_enable()
670 rdev->gart.ready = true; r100_pci_gart_enable()
674 void r100_pci_gart_disable(struct radeon_device *rdev) r100_pci_gart_disable() argument
690 void r100_pci_gart_set_page(struct radeon_device *rdev, unsigned i, r100_pci_gart_set_page() argument
693 u32 *gtt = rdev->gart.ptr; r100_pci_gart_set_page()
697 void r100_pci_gart_fini(struct radeon_device *rdev) r100_pci_gart_fini() argument
699 radeon_gart_fini(rdev); r100_pci_gart_fini()
700 r100_pci_gart_disable(rdev); r100_pci_gart_fini()
701 radeon_gart_table_ram_free(rdev); r100_pci_gart_fini()
704 int r100_irq_set(struct radeon_device *rdev) r100_irq_set() argument
708 if (!rdev->irq.installed) { r100_irq_set()
713 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) { r100_irq_set()
716 if (rdev->irq.crtc_vblank_int[0] || r100_irq_set()
717 atomic_read(&rdev->irq.pflip[0])) { r100_irq_set()
720 if (rdev->irq.crtc_vblank_int[1] || r100_irq_set()
721 atomic_read(&rdev->irq.pflip[1])) { r100_irq_set()
724 if (rdev->irq.hpd[0]) { r100_irq_set()
727 if (rdev->irq.hpd[1]) { r100_irq_set()
738 void r100_irq_disable(struct radeon_device *rdev) r100_irq_disable() argument
749 static uint32_t r100_irq_ack(struct radeon_device *rdev) r100_irq_ack() argument
762 int r100_irq_process(struct radeon_device *rdev) r100_irq_process() argument
767 status = r100_irq_ack(rdev); r100_irq_process()
771 if (rdev->shutdown) { r100_irq_process()
777 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); r100_irq_process()
781 if (rdev->irq.crtc_vblank_int[0]) { r100_irq_process()
782 drm_handle_vblank(rdev->ddev, 0); r100_irq_process()
783 rdev->pm.vblank_sync = true; r100_irq_process()
784 wake_up(&rdev->irq.vblank_queue); r100_irq_process()
786 if (atomic_read(&rdev->irq.pflip[0])) r100_irq_process()
787 radeon_crtc_handle_vblank(rdev, 0); r100_irq_process()
790 if (rdev->irq.crtc_vblank_int[1]) { r100_irq_process()
791 drm_handle_vblank(rdev->ddev, 1); r100_irq_process()
792 rdev->pm.vblank_sync = true; r100_irq_process()
793 wake_up(&rdev->irq.vblank_queue); r100_irq_process()
795 if (atomic_read(&rdev->irq.pflip[1])) r100_irq_process()
796 radeon_crtc_handle_vblank(rdev, 1); r100_irq_process()
806 status = r100_irq_ack(rdev); r100_irq_process()
809 schedule_delayed_work(&rdev->hotplug_work, 0); r100_irq_process()
810 if (rdev->msi_enabled) { r100_irq_process()
811 switch (rdev->family) { r100_irq_process()
826 u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc) r100_get_vblank_counter() argument
836 * @rdev: radeon device structure r100_get_vblank_counter()
839 static void r100_ring_hdp_flush(struct radeon_device *rdev, struct radeon_ring *ring) r100_ring_hdp_flush() argument
842 radeon_ring_write(ring, rdev->config.r100.hdp_cntl | r100_ring_hdp_flush()
845 radeon_ring_write(ring, rdev->config.r100.hdp_cntl); r100_ring_hdp_flush()
850 void r100_fence_ring_emit(struct radeon_device *rdev, r100_fence_ring_emit() argument
853 struct radeon_ring *ring = &rdev->ring[fence->ring]; r100_fence_ring_emit()
864 r100_ring_hdp_flush(rdev, ring); r100_fence_ring_emit()
866 radeon_ring_write(ring, PACKET0(rdev->fence_drv[fence->ring].scratch_reg, 0)); r100_fence_ring_emit()
872 bool r100_semaphore_ring_emit(struct radeon_device *rdev, r100_semaphore_ring_emit() argument
882 struct radeon_fence *r100_copy_blit(struct radeon_device *rdev, r100_copy_blit() argument
888 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; r100_copy_blit()
907 r = radeon_ring_lock(rdev, ring, ndw); r100_copy_blit()
950 r = radeon_fence_emit(rdev, &fence, RADEON_RING_TYPE_GFX_INDEX); r100_copy_blit()
952 radeon_ring_unlock_undo(rdev, ring); r100_copy_blit()
955 radeon_ring_unlock_commit(rdev, ring, false); r100_copy_blit()
959 static int r100_cp_wait_for_idle(struct radeon_device *rdev) r100_cp_wait_for_idle() argument
964 for (i = 0; i < rdev->usec_timeout; i++) { r100_cp_wait_for_idle()
974 void r100_ring_start(struct radeon_device *rdev, struct radeon_ring *ring) r100_ring_start() argument
978 r = radeon_ring_lock(rdev, ring, 2); r100_ring_start()
988 radeon_ring_unlock_commit(rdev, ring, false); r100_ring_start()
993 static int r100_cp_init_microcode(struct radeon_device *rdev) r100_cp_init_microcode() argument
1000 if ((rdev->family == CHIP_R100) || (rdev->family == CHIP_RV100) || r100_cp_init_microcode()
1001 (rdev->family == CHIP_RV200) || (rdev->family == CHIP_RS100) || r100_cp_init_microcode()
1002 (rdev->family == CHIP_RS200)) { r100_cp_init_microcode()
1005 } else if ((rdev->family == CHIP_R200) || r100_cp_init_microcode()
1006 (rdev->family == CHIP_RV250) || r100_cp_init_microcode()
1007 (rdev->family == CHIP_RV280) || r100_cp_init_microcode()
1008 (rdev->family == CHIP_RS300)) { r100_cp_init_microcode()
1011 } else if ((rdev->family == CHIP_R300) || r100_cp_init_microcode()
1012 (rdev->family == CHIP_R350) || r100_cp_init_microcode()
1013 (rdev->family == CHIP_RV350) || r100_cp_init_microcode()
1014 (rdev->family == CHIP_RV380) || r100_cp_init_microcode()
1015 (rdev->family == CHIP_RS400) || r100_cp_init_microcode()
1016 (rdev->family == CHIP_RS480)) { r100_cp_init_microcode()
1019 } else if ((rdev->family == CHIP_R420) || r100_cp_init_microcode()
1020 (rdev->family == CHIP_R423) || r100_cp_init_microcode()
1021 (rdev->family == CHIP_RV410)) { r100_cp_init_microcode()
1024 } else if ((rdev->family == CHIP_RS690) || r100_cp_init_microcode()
1025 (rdev->family == CHIP_RS740)) { r100_cp_init_microcode()
1028 } else if (rdev->family == CHIP_RS600) { r100_cp_init_microcode()
1031 } else if ((rdev->family == CHIP_RV515) || r100_cp_init_microcode()
1032 (rdev->family == CHIP_R520) || r100_cp_init_microcode()
1033 (rdev->family == CHIP_RV530) || r100_cp_init_microcode()
1034 (rdev->family == CHIP_R580) || r100_cp_init_microcode()
1035 (rdev->family == CHIP_RV560) || r100_cp_init_microcode()
1036 (rdev->family == CHIP_RV570)) { r100_cp_init_microcode()
1041 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); r100_cp_init_microcode()
1045 } else if (rdev->me_fw->size % 8) { r100_cp_init_microcode()
1048 rdev->me_fw->size, fw_name); r100_cp_init_microcode()
1050 release_firmware(rdev->me_fw); r100_cp_init_microcode()
1051 rdev->me_fw = NULL; r100_cp_init_microcode()
1056 u32 r100_gfx_get_rptr(struct radeon_device *rdev, r100_gfx_get_rptr() argument
1061 if (rdev->wb.enabled) r100_gfx_get_rptr()
1062 rptr = le32_to_cpu(rdev->wb.wb[ring->rptr_offs/4]); r100_gfx_get_rptr()
1069 u32 r100_gfx_get_wptr(struct radeon_device *rdev, r100_gfx_get_wptr() argument
1079 void r100_gfx_set_wptr(struct radeon_device *rdev, r100_gfx_set_wptr() argument
1086 static void r100_cp_load_microcode(struct radeon_device *rdev) r100_cp_load_microcode() argument
1091 if (r100_gui_wait_for_idle(rdev)) { r100_cp_load_microcode()
1096 if (rdev->me_fw) { r100_cp_load_microcode()
1097 size = rdev->me_fw->size / 4; r100_cp_load_microcode()
1098 fw_data = (const __be32 *)&rdev->me_fw->data[0]; r100_cp_load_microcode()
1109 int r100_cp_init(struct radeon_device *rdev, unsigned ring_size) r100_cp_init() argument
1111 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; r100_cp_init()
1122 if (r100_debugfs_cp_init(rdev)) { r100_cp_init()
1125 if (!rdev->me_fw) { r100_cp_init()
1126 r = r100_cp_init_microcode(rdev); r100_cp_init()
1136 r100_cp_load_microcode(rdev); r100_cp_init()
1137 r = radeon_ring_init(rdev, ring, ring_size, RADEON_WB_CP_RPTR_OFFSET, r100_cp_init()
1187 S_00070C_RB_RPTR_ADDR((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) >> 2)); r100_cp_init()
1188 WREG32(R_000774_SCRATCH_ADDR, rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET); r100_cp_init()
1190 if (rdev->wb.enabled) r100_cp_init()
1208 pci_set_master(rdev->pdev); r100_cp_init()
1210 radeon_ring_start(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]); r100_cp_init()
1211 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring); r100_cp_init()
1217 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size); r100_cp_init()
1220 && radeon_ring_supports_scratch_reg(rdev, ring)) { r100_cp_init()
1221 r = radeon_scratch_get(rdev, &ring->rptr_save_reg); r100_cp_init()
1230 void r100_cp_fini(struct radeon_device *rdev) r100_cp_fini() argument
1232 if (r100_cp_wait_for_idle(rdev)) { r100_cp_fini()
1236 r100_cp_disable(rdev); r100_cp_fini()
1237 radeon_scratch_free(rdev, rdev->ring[RADEON_RING_TYPE_GFX_INDEX].rptr_save_reg); r100_cp_fini()
1238 radeon_ring_fini(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]); r100_cp_fini()
1242 void r100_cp_disable(struct radeon_device *rdev) r100_cp_disable() argument
1245 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size); r100_cp_disable()
1246 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; r100_cp_disable()
1250 if (r100_gui_wait_for_idle(rdev)) { r100_cp_disable()
1465 crtc = drm_crtc_find(p->rdev->ddev, crtc_id); r100_cs_packet_parse_vline()
1961 r = r100_cs_track_check(p->rdev, track); r100_packet3_check()
1973 r = r100_cs_track_check(p->rdev, track); r100_packet3_check()
1985 r = r100_cs_track_check(p->rdev, track); r100_packet3_check()
1992 r = r100_cs_track_check(p->rdev, track); r100_packet3_check()
1999 r = r100_cs_track_check(p->rdev, track); r100_packet3_check()
2006 r = r100_cs_track_check(p->rdev, track); r100_packet3_check()
2013 r = r100_cs_track_check(p->rdev, track); r100_packet3_check()
2020 if (p->rdev->hyperz_filp != p->filp) r100_packet3_check()
2041 r100_cs_track_clear(p->rdev, track); r100_cs_parse()
2051 if (p->rdev->family >= CHIP_R200) r100_cs_parse()
2053 p->rdev->config.r100.reg_safe_bm, r100_cs_parse()
2054 p->rdev->config.r100.reg_safe_bm_size, r100_cs_parse()
2058 p->rdev->config.r100.reg_safe_bm, r100_cs_parse()
2059 p->rdev->config.r100.reg_safe_bm_size, r100_cs_parse()
2125 static int r100_cs_track_cube(struct radeon_device *rdev, r100_cs_track_cube() argument
2156 static int r100_cs_track_texture_check(struct radeon_device *rdev, r100_cs_track_texture_check() argument
2177 if (rdev->family < CHIP_R300) r100_cs_track_texture_check()
2183 if (rdev->family >= CHIP_RV515) r100_cs_track_texture_check()
2190 if (rdev->family >= CHIP_RV515) r100_cs_track_texture_check()
2217 ret = r100_cs_track_cube(rdev, track, u); r100_cs_track_texture_check()
2238 int r100_cs_track_check(struct radeon_device *rdev, struct r100_cs_track *track) r100_cs_track_check() argument
2324 dev_err(rdev->dev, "(PW %u) Vertex array %u " r100_cs_track_check()
2343 dev_err(rdev->dev, "(PW %u) Vertex array %u " r100_cs_track_check()
2370 return r100_cs_track_texture_check(rdev, track); r100_cs_track_check()
2375 void r100_cs_track_clear(struct radeon_device *rdev, struct r100_cs_track *track) r100_cs_track_clear() argument
2384 if (rdev->family < CHIP_R300) { r100_cs_track_clear()
2386 if (rdev->family <= CHIP_RS200) r100_cs_track_clear()
2428 if (rdev->family <= CHIP_RS200) { r100_cs_track_clear()
2455 static void r100_errata(struct radeon_device *rdev) r100_errata() argument
2457 rdev->pll_errata = 0; r100_errata()
2459 if (rdev->family == CHIP_RV200 || rdev->family == CHIP_RS200) { r100_errata()
2460 rdev->pll_errata |= CHIP_ERRATA_PLL_DUMMYREADS; r100_errata()
2463 if (rdev->family == CHIP_RV100 || r100_errata()
2464 rdev->family == CHIP_RS100 || r100_errata()
2465 rdev->family == CHIP_RS200) { r100_errata()
2466 rdev->pll_errata |= CHIP_ERRATA_PLL_DELAY; r100_errata()
2470 static int r100_rbbm_fifo_wait_for_entry(struct radeon_device *rdev, unsigned n) r100_rbbm_fifo_wait_for_entry() argument
2475 for (i = 0; i < rdev->usec_timeout; i++) { r100_rbbm_fifo_wait_for_entry()
2485 int r100_gui_wait_for_idle(struct radeon_device *rdev) r100_gui_wait_for_idle() argument
2490 if (r100_rbbm_fifo_wait_for_entry(rdev, 64)) { r100_gui_wait_for_idle()
2494 for (i = 0; i < rdev->usec_timeout; i++) { r100_gui_wait_for_idle()
2504 int r100_mc_wait_for_idle(struct radeon_device *rdev) r100_mc_wait_for_idle() argument
2509 for (i = 0; i < rdev->usec_timeout; i++) { r100_mc_wait_for_idle()
2520 bool r100_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) r100_gpu_is_lockup() argument
2526 radeon_ring_lockup_update(rdev, ring); r100_gpu_is_lockup()
2529 return radeon_ring_test_lockup(rdev, ring); r100_gpu_is_lockup()
2533 void r100_enable_bm(struct radeon_device *rdev) r100_enable_bm() argument
2541 void r100_bm_disable(struct radeon_device *rdev) r100_bm_disable() argument
2554 pci_clear_master(rdev->pdev); r100_bm_disable()
2558 int r100_asic_reset(struct radeon_device *rdev) r100_asic_reset() argument
2568 r100_mc_stop(rdev, &save); r100_asic_reset()
2570 dev_info(rdev->dev, "(%s:%d) RBBM_STATUS=0x%08X\n", __func__, __LINE__, status); r100_asic_reset()
2579 pci_save_state(rdev->pdev); r100_asic_reset()
2581 r100_bm_disable(rdev); r100_asic_reset()
2591 dev_info(rdev->dev, "(%s:%d) RBBM_STATUS=0x%08X\n", __func__, __LINE__, status); r100_asic_reset()
2599 dev_info(rdev->dev, "(%s:%d) RBBM_STATUS=0x%08X\n", __func__, __LINE__, status); r100_asic_reset()
2601 pci_restore_state(rdev->pdev); r100_asic_reset()
2602 r100_enable_bm(rdev); r100_asic_reset()
2606 dev_err(rdev->dev, "failed to reset GPU\n"); r100_asic_reset()
2609 dev_info(rdev->dev, "GPU reset succeed\n"); r100_asic_reset()
2610 r100_mc_resume(rdev, &save); r100_asic_reset()
2614 void r100_set_common_regs(struct radeon_device *rdev) r100_set_common_regs() argument
2616 struct drm_device *dev = rdev->ddev; r100_set_common_regs()
2701 static void r100_vram_get_type(struct radeon_device *rdev) r100_vram_get_type() argument
2705 rdev->mc.vram_is_ddr = false; r100_vram_get_type()
2706 if (rdev->flags & RADEON_IS_IGP) r100_vram_get_type()
2707 rdev->mc.vram_is_ddr = true; r100_vram_get_type()
2709 rdev->mc.vram_is_ddr = true; r100_vram_get_type()
2710 if ((rdev->family == CHIP_RV100) || r100_vram_get_type()
2711 (rdev->family == CHIP_RS100) || r100_vram_get_type()
2712 (rdev->family == CHIP_RS200)) { r100_vram_get_type()
2715 rdev->mc.vram_width = 32; r100_vram_get_type()
2717 rdev->mc.vram_width = 64; r100_vram_get_type()
2719 if (rdev->flags & RADEON_SINGLE_CRTC) { r100_vram_get_type()
2720 rdev->mc.vram_width /= 4; r100_vram_get_type()
2721 rdev->mc.vram_is_ddr = true; r100_vram_get_type()
2723 } else if (rdev->family <= CHIP_RV280) { r100_vram_get_type()
2726 rdev->mc.vram_width = 128; r100_vram_get_type()
2728 rdev->mc.vram_width = 64; r100_vram_get_type()
2732 rdev->mc.vram_width = 128; r100_vram_get_type()
2736 static u32 r100_get_accessible_vram(struct radeon_device *rdev) r100_get_accessible_vram() argument
2746 if (rdev->family == CHIP_RV280 || r100_get_accessible_vram()
2747 rdev->family >= CHIP_RV350) { r100_get_accessible_vram()
2758 pci_read_config_byte(rdev->pdev, 0xe, &byte); r100_get_accessible_vram()
2774 void r100_vram_init_sizes(struct radeon_device *rdev) r100_vram_init_sizes() argument
2779 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0); r100_vram_init_sizes()
2780 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0); r100_vram_init_sizes()
2781 rdev->mc.visible_vram_size = r100_get_accessible_vram(rdev); r100_vram_init_sizes()
2783 if (rdev->mc.visible_vram_size > rdev->mc.aper_size) r100_vram_init_sizes()
2784 rdev->mc.visible_vram_size = rdev->mc.aper_size; r100_vram_init_sizes()
2786 if (rdev->flags & RADEON_IS_IGP) { r100_vram_init_sizes()
2790 rdev->mc.real_vram_size = (((tom >> 16) - (tom & 0xffff) + 1) << 16); r100_vram_init_sizes()
2791 WREG32(RADEON_CONFIG_MEMSIZE, rdev->mc.real_vram_size); r100_vram_init_sizes()
2792 rdev->mc.mc_vram_size = rdev->mc.real_vram_size; r100_vram_init_sizes()
2794 rdev->mc.real_vram_size = RREG32(RADEON_CONFIG_MEMSIZE); r100_vram_init_sizes()
2798 if (rdev->mc.real_vram_size == 0) { r100_vram_init_sizes()
2799 rdev->mc.real_vram_size = 8192 * 1024; r100_vram_init_sizes()
2800 WREG32(RADEON_CONFIG_MEMSIZE, rdev->mc.real_vram_size); r100_vram_init_sizes()
2805 if (rdev->mc.aper_size > config_aper_size) r100_vram_init_sizes()
2806 config_aper_size = rdev->mc.aper_size; r100_vram_init_sizes()
2808 if (config_aper_size > rdev->mc.real_vram_size) r100_vram_init_sizes()
2809 rdev->mc.mc_vram_size = config_aper_size; r100_vram_init_sizes()
2811 rdev->mc.mc_vram_size = rdev->mc.real_vram_size; r100_vram_init_sizes()
2815 void r100_vga_set_state(struct radeon_device *rdev, bool state) r100_vga_set_state() argument
2829 static void r100_mc_init(struct radeon_device *rdev) r100_mc_init() argument
2833 r100_vram_get_type(rdev); r100_mc_init()
2834 r100_vram_init_sizes(rdev); r100_mc_init()
2835 base = rdev->mc.aper_base; r100_mc_init()
2836 if (rdev->flags & RADEON_IS_IGP) r100_mc_init()
2838 radeon_vram_location(rdev, &rdev->mc, base); r100_mc_init()
2839 rdev->mc.gtt_base_align = 0; r100_mc_init()
2840 if (!(rdev->flags & RADEON_IS_AGP)) r100_mc_init()
2841 radeon_gtt_location(rdev, &rdev->mc); r100_mc_init()
2842 radeon_update_bandwidth_info(rdev); r100_mc_init()
2849 void r100_pll_errata_after_index(struct radeon_device *rdev) r100_pll_errata_after_index() argument
2851 if (rdev->pll_errata & CHIP_ERRATA_PLL_DUMMYREADS) { r100_pll_errata_after_index()
2857 static void r100_pll_errata_after_data(struct radeon_device *rdev) r100_pll_errata_after_data() argument
2862 if (rdev->pll_errata & CHIP_ERRATA_PLL_DELAY) { r100_pll_errata_after_data()
2871 if (rdev->pll_errata & CHIP_ERRATA_R300_CG) { r100_pll_errata_after_data()
2882 uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg) r100_pll_rreg() argument
2887 spin_lock_irqsave(&rdev->pll_idx_lock, flags); r100_pll_rreg()
2889 r100_pll_errata_after_index(rdev); r100_pll_rreg()
2891 r100_pll_errata_after_data(rdev); r100_pll_rreg()
2892 spin_unlock_irqrestore(&rdev->pll_idx_lock, flags); r100_pll_rreg()
2896 void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) r100_pll_wreg() argument
2900 spin_lock_irqsave(&rdev->pll_idx_lock, flags); r100_pll_wreg()
2902 r100_pll_errata_after_index(rdev); r100_pll_wreg()
2904 r100_pll_errata_after_data(rdev); r100_pll_wreg()
2905 spin_unlock_irqrestore(&rdev->pll_idx_lock, flags); r100_pll_wreg()
2908 static void r100_set_safe_registers(struct radeon_device *rdev) r100_set_safe_registers() argument
2910 if (ASIC_IS_RN50(rdev)) { r100_set_safe_registers()
2911 rdev->config.r100.reg_safe_bm = rn50_reg_safe_bm; r100_set_safe_registers()
2912 rdev->config.r100.reg_safe_bm_size = ARRAY_SIZE(rn50_reg_safe_bm); r100_set_safe_registers()
2913 } else if (rdev->family < CHIP_R200) { r100_set_safe_registers()
2914 rdev->config.r100.reg_safe_bm = r100_reg_safe_bm; r100_set_safe_registers()
2915 rdev->config.r100.reg_safe_bm_size = ARRAY_SIZE(r100_reg_safe_bm); r100_set_safe_registers()
2917 r200_set_safe_registers(rdev); r100_set_safe_registers()
2929 struct radeon_device *rdev = dev->dev_private; r100_debugfs_rbbm_info() local
2950 struct radeon_device *rdev = dev->dev_private; r100_debugfs_cp_ring_info() local
2951 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; r100_debugfs_cp_ring_info()
2955 radeon_ring_free_size(rdev, ring); r100_debugfs_cp_ring_info()
2978 struct radeon_device *rdev = dev->dev_private; r100_debugfs_cp_csq_fifo() local
3028 struct radeon_device *rdev = dev->dev_private; r100_debugfs_mc_info() local
3068 int r100_debugfs_rbbm_init(struct radeon_device *rdev) r100_debugfs_rbbm_init() argument
3071 return radeon_debugfs_add_files(rdev, r100_debugfs_rbbm_list, 1); r100_debugfs_rbbm_init()
3077 int r100_debugfs_cp_init(struct radeon_device *rdev) r100_debugfs_cp_init() argument
3080 return radeon_debugfs_add_files(rdev, r100_debugfs_cp_list, 2); r100_debugfs_cp_init()
3086 int r100_debugfs_mc_info_init(struct radeon_device *rdev) r100_debugfs_mc_info_init() argument
3089 return radeon_debugfs_add_files(rdev, r100_debugfs_mc_info_list, 1); r100_debugfs_mc_info_init()
3095 int r100_set_surface_reg(struct radeon_device *rdev, int reg, r100_set_surface_reg() argument
3102 if (rdev->family <= CHIP_RS200) { r100_set_surface_reg()
3112 } else if (rdev->family <= CHIP_RV280) { r100_set_surface_reg()
3130 if (rdev->family < CHIP_R300) r100_set_surface_reg()
3143 void r100_clear_surface_reg(struct radeon_device *rdev, int reg) r100_clear_surface_reg() argument
3149 void r100_bandwidth_update(struct radeon_device *rdev) r100_bandwidth_update() argument
3223 if (!rdev->mode_info.mode_config_initialized) r100_bandwidth_update()
3226 radeon_update_display_priority(rdev); r100_bandwidth_update()
3228 if (rdev->mode_info.crtcs[0]->base.enabled) { r100_bandwidth_update()
3229 mode1 = &rdev->mode_info.crtcs[0]->base.mode; r100_bandwidth_update()
3230 pixel_bytes1 = rdev->mode_info.crtcs[0]->base.primary->fb->bits_per_pixel / 8; r100_bandwidth_update()
3232 if (!(rdev->flags & RADEON_SINGLE_CRTC)) { r100_bandwidth_update()
3233 if (rdev->mode_info.crtcs[1]->base.enabled) { r100_bandwidth_update()
3234 mode2 = &rdev->mode_info.crtcs[1]->base.mode; r100_bandwidth_update()
3235 pixel_bytes2 = rdev->mode_info.crtcs[1]->base.primary->fb->bits_per_pixel / 8; r100_bandwidth_update()
3241 if ((rdev->disp_priority == 2) && ASIC_IS_R300(rdev)) { r100_bandwidth_update()
3256 sclk_ff = rdev->pm.sclk; r100_bandwidth_update()
3257 mclk_ff = rdev->pm.mclk; r100_bandwidth_update()
3259 temp = (rdev->mc.vram_width / 8) * (rdev->mc.vram_is_ddr ? 2 : 1); r100_bandwidth_update()
3289 if ((rdev->family == CHIP_RV100) || (rdev->flags & RADEON_IS_IGP)) { /* RV100, M6, IGPs */ r100_bandwidth_update()
3293 } else if (rdev->family == CHIP_R300 || r100_bandwidth_update()
3294 rdev->family == CHIP_R350) { /* r300, r350 */ r100_bandwidth_update()
3298 } else if (rdev->family == CHIP_RV350 || r100_bandwidth_update()
3299 rdev->family <= CHIP_RV380) { r100_bandwidth_update()
3304 } else if (rdev->family == CHIP_R420 || r100_bandwidth_update()
3305 rdev->family == CHIP_R423 || r100_bandwidth_update()
3306 rdev->family == CHIP_RV410) { r100_bandwidth_update()
3330 if ((rdev->family == CHIP_RV100) || rdev->flags & RADEON_IS_IGP) { r100_bandwidth_update()
3331 if (rdev->family == CHIP_RS480) /* don't think rs400 */ r100_bandwidth_update()
3338 if (rdev->family == CHIP_RS400 || r100_bandwidth_update()
3339 rdev->family == CHIP_RS480) { r100_bandwidth_update()
3346 if (ASIC_IS_R300(rdev) && !(rdev->flags & RADEON_IS_IGP)) { r100_bandwidth_update()
3367 if (rdev->family == CHIP_RV410 || r100_bandwidth_update()
3368 rdev->family == CHIP_R420 || r100_bandwidth_update()
3369 rdev->family == CHIP_R423) r100_bandwidth_update()
3378 if (rdev->flags & RADEON_IS_AGP) { r100_bandwidth_update()
3386 if (ASIC_IS_R300(rdev)) { r100_bandwidth_update()
3389 if ((rdev->family == CHIP_RV100) || r100_bandwidth_update()
3390 rdev->flags & RADEON_IS_IGP) { r100_bandwidth_update()
3391 if (rdev->mc.vram_is_ddr) r100_bandwidth_update()
3396 if (rdev->mc.vram_width == 128) r100_bandwidth_update()
3405 if (rdev->mc.vram_is_ddr) { r100_bandwidth_update()
3406 if (rdev->mc.vram_width == 32) { r100_bandwidth_update()
3433 temp_ff.full = dfixed_const((2 * (cur_size - (rdev->mc.vram_is_ddr + 1)))); r100_bandwidth_update()
3455 if (ASIC_IS_RV100(rdev)) r100_bandwidth_update()
3484 if (rdev->disp_priority == 2) { r100_bandwidth_update()
3495 if (critical_point == 0 && mode2 && rdev->family == CHIP_R300) { r100_bandwidth_update()
3504 if ((rdev->family == CHIP_R350) && r100_bandwidth_update()
3520 if ((rdev->family == CHIP_RS400) || r100_bandwidth_update()
3521 (rdev->family == CHIP_RS480)) { r100_bandwidth_update()
3560 if ((rdev->family == CHIP_R350) && r100_bandwidth_update()
3570 if ((rdev->family == CHIP_RS100) || r100_bandwidth_update()
3571 (rdev->family == CHIP_RS200)) r100_bandwidth_update()
3574 temp = (rdev->mc.vram_width * rdev->mc.vram_is_ddr + 1)/128; r100_bandwidth_update()
3594 if (rdev->disp_priority == 2) { r100_bandwidth_update()
3603 if (critical_point2 == 0 && rdev->family == CHIP_R300) { r100_bandwidth_update()
3611 if ((rdev->family == CHIP_RS400) || r100_bandwidth_update()
3612 (rdev->family == CHIP_RS480)) { r100_bandwidth_update()
3640 rdev->mode_info.crtcs[0]->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode1->crtc_hdisplay); r100_bandwidth_update()
3643 rdev->mode_info.crtcs[1]->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode2->crtc_hdisplay); r100_bandwidth_update()
3646 int r100_ring_test(struct radeon_device *rdev, struct radeon_ring *ring) r100_ring_test() argument
3653 r = radeon_scratch_get(rdev, &scratch); r100_ring_test()
3659 r = radeon_ring_lock(rdev, ring, 2); r100_ring_test()
3662 radeon_scratch_free(rdev, scratch); r100_ring_test()
3667 radeon_ring_unlock_commit(rdev, ring, false); r100_ring_test()
3668 for (i = 0; i < rdev->usec_timeout; i++) { r100_ring_test()
3675 if (i < rdev->usec_timeout) { r100_ring_test()
3682 radeon_scratch_free(rdev, scratch); r100_ring_test()
3686 void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) r100_ring_ib_execute() argument
3688 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; r100_ring_ib_execute()
3701 int r100_ib_test(struct radeon_device *rdev, struct radeon_ring *ring) r100_ib_test() argument
3709 r = radeon_scratch_get(rdev, &scratch); r100_ib_test()
3715 r = radeon_ib_get(rdev, RADEON_RING_TYPE_GFX_INDEX, &ib, NULL, 256); r100_ib_test()
3729 r = radeon_ib_schedule(rdev, &ib, NULL, false); r100_ib_test()
3739 for (i = 0; i < rdev->usec_timeout; i++) { r100_ib_test()
3746 if (i < rdev->usec_timeout) { r100_ib_test()
3754 radeon_ib_free(rdev, &ib); r100_ib_test()
3756 radeon_scratch_free(rdev, scratch); r100_ib_test()
3760 void r100_mc_stop(struct radeon_device *rdev, struct r100_mc_save *save) r100_mc_stop() argument
3765 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; r100_mc_stop()
3773 if (!(rdev->flags & RADEON_SINGLE_CRTC)) { r100_mc_stop()
3790 if (!(rdev->flags & RADEON_SINGLE_CRTC)) { r100_mc_stop()
3802 void r100_mc_resume(struct radeon_device *rdev, struct r100_mc_save *save) r100_mc_resume() argument
3805 WREG32(R_00023C_DISPLAY_BASE_ADDR, rdev->mc.vram_start); r100_mc_resume()
3806 if (!(rdev->flags & RADEON_SINGLE_CRTC)) { r100_mc_resume()
3807 WREG32(R_00033C_CRTC2_DISPLAY_BASE_ADDR, rdev->mc.vram_start); r100_mc_resume()
3813 if (!(rdev->flags & RADEON_SINGLE_CRTC)) { r100_mc_resume()
3818 void r100_vga_render_disable(struct radeon_device *rdev) r100_vga_render_disable() argument
3826 static void r100_debugfs(struct radeon_device *rdev) r100_debugfs() argument
3830 r = r100_debugfs_mc_info_init(rdev); r100_debugfs()
3832 dev_warn(rdev->dev, "Failed to create r100_mc debugfs file.\n"); r100_debugfs()
3835 static void r100_mc_program(struct radeon_device *rdev) r100_mc_program() argument
3840 r100_mc_stop(rdev, &save); r100_mc_program()
3841 if (rdev->flags & RADEON_IS_AGP) { r100_mc_program()
3843 S_00014C_MC_AGP_START(rdev->mc.gtt_start >> 16) | r100_mc_program()
3844 S_00014C_MC_AGP_TOP(rdev->mc.gtt_end >> 16)); r100_mc_program()
3845 WREG32(R_000170_AGP_BASE, lower_32_bits(rdev->mc.agp_base)); r100_mc_program()
3846 if (rdev->family > CHIP_RV200) r100_mc_program()
3848 upper_32_bits(rdev->mc.agp_base) & 0xff); r100_mc_program()
3852 if (rdev->family > CHIP_RV200) r100_mc_program()
3856 if (r100_mc_wait_for_idle(rdev)) r100_mc_program()
3857 dev_warn(rdev->dev, "Wait for MC idle timeout.\n"); r100_mc_program()
3860 S_000148_MC_FB_START(rdev->mc.vram_start >> 16) | r100_mc_program()
3861 S_000148_MC_FB_TOP(rdev->mc.vram_end >> 16)); r100_mc_program()
3862 r100_mc_resume(rdev, &save); r100_mc_program()
3865 static void r100_clock_startup(struct radeon_device *rdev) r100_clock_startup() argument
3870 radeon_legacy_set_clock_gating(rdev, 1); r100_clock_startup()
3874 if ((rdev->family == CHIP_RV250) || (rdev->family == CHIP_RV280)) r100_clock_startup()
3879 static int r100_startup(struct radeon_device *rdev) r100_startup() argument
3884 r100_set_common_regs(rdev); r100_startup()
3886 r100_mc_program(rdev); r100_startup()
3888 r100_clock_startup(rdev); r100_startup()
3891 r100_enable_bm(rdev); r100_startup()
3892 if (rdev->flags & RADEON_IS_PCI) { r100_startup()
3893 r = r100_pci_gart_enable(rdev); r100_startup()
3899 r = radeon_wb_init(rdev); r100_startup()
3903 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); r100_startup()
3905 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); r100_startup()
3910 if (!rdev->irq.installed) { r100_startup()
3911 r = radeon_irq_kms_init(rdev); r100_startup()
3916 r100_irq_set(rdev); r100_startup()
3917 rdev->config.r100.hdp_cntl = RREG32(RADEON_HOST_PATH_CNTL); r100_startup()
3919 r = r100_cp_init(rdev, 1024 * 1024); r100_startup()
3921 dev_err(rdev->dev, "failed initializing CP (%d).\n", r); r100_startup()
3925 r = radeon_ib_pool_init(rdev); r100_startup()
3927 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); r100_startup()
3934 int r100_resume(struct radeon_device *rdev) r100_resume() argument
3939 if (rdev->flags & RADEON_IS_PCI) r100_resume()
3940 r100_pci_gart_disable(rdev); r100_resume()
3942 r100_clock_startup(rdev); r100_resume()
3944 if (radeon_asic_reset(rdev)) { r100_resume()
3945 dev_warn(rdev->dev, "GPU reset failed ! (0xE40=0x%08X, 0x7C0=0x%08X)\n", r100_resume()
3950 radeon_combios_asic_init(rdev->ddev); r100_resume()
3952 r100_clock_startup(rdev); r100_resume()
3954 radeon_surface_init(rdev); r100_resume()
3956 rdev->accel_working = true; r100_resume()
3957 r = r100_startup(rdev); r100_resume()
3959 rdev->accel_working = false; r100_resume()
3964 int r100_suspend(struct radeon_device *rdev) r100_suspend() argument
3966 radeon_pm_suspend(rdev); r100_suspend()
3967 r100_cp_disable(rdev); r100_suspend()
3968 radeon_wb_disable(rdev); r100_suspend()
3969 r100_irq_disable(rdev); r100_suspend()
3970 if (rdev->flags & RADEON_IS_PCI) r100_suspend()
3971 r100_pci_gart_disable(rdev); r100_suspend()
3975 void r100_fini(struct radeon_device *rdev) r100_fini() argument
3977 radeon_pm_fini(rdev); r100_fini()
3978 r100_cp_fini(rdev); r100_fini()
3979 radeon_wb_fini(rdev); r100_fini()
3980 radeon_ib_pool_fini(rdev); r100_fini()
3981 radeon_gem_fini(rdev); r100_fini()
3982 if (rdev->flags & RADEON_IS_PCI) r100_fini()
3983 r100_pci_gart_fini(rdev); r100_fini()
3984 radeon_agp_fini(rdev); r100_fini()
3985 radeon_irq_kms_fini(rdev); r100_fini()
3986 radeon_fence_driver_fini(rdev); r100_fini()
3987 radeon_bo_fini(rdev); r100_fini()
3988 radeon_atombios_fini(rdev); r100_fini()
3989 kfree(rdev->bios); r100_fini()
3990 rdev->bios = NULL; r100_fini()
4000 void r100_restore_sanity(struct radeon_device *rdev) r100_restore_sanity() argument
4018 int r100_init(struct radeon_device *rdev) r100_init() argument
4023 r100_debugfs(rdev); r100_init()
4025 r100_vga_render_disable(rdev); r100_init()
4027 radeon_scratch_init(rdev); r100_init()
4029 radeon_surface_init(rdev); r100_init()
4031 r100_restore_sanity(rdev); r100_init()
4034 if (!radeon_get_bios(rdev)) { r100_init()
4035 if (ASIC_IS_AVIVO(rdev)) r100_init()
4038 if (rdev->is_atom_bios) { r100_init()
4039 dev_err(rdev->dev, "Expecting combios for RS400/RS480 GPU\n"); r100_init()
4042 r = radeon_combios_init(rdev); r100_init()
4047 if (radeon_asic_reset(rdev)) { r100_init()
4048 dev_warn(rdev->dev, r100_init()
4054 if (radeon_boot_test_post_card(rdev) == false) r100_init()
4057 r100_errata(rdev); r100_init()
4059 radeon_get_clock_info(rdev->ddev); r100_init()
4061 if (rdev->flags & RADEON_IS_AGP) { r100_init()
4062 r = radeon_agp_init(rdev); r100_init()
4064 radeon_agp_disable(rdev); r100_init()
4068 r100_mc_init(rdev); r100_init()
4070 r = radeon_fence_driver_init(rdev); r100_init()
4074 r = radeon_bo_init(rdev); r100_init()
4077 if (rdev->flags & RADEON_IS_PCI) { r100_init()
4078 r = r100_pci_gart_init(rdev); r100_init()
4082 r100_set_safe_registers(rdev); r100_init()
4085 radeon_pm_init(rdev); r100_init()
4087 rdev->accel_working = true; r100_init()
4088 r = r100_startup(rdev); r100_init()
4091 dev_err(rdev->dev, "Disabling GPU acceleration\n"); r100_init()
4092 r100_cp_fini(rdev); r100_init()
4093 radeon_wb_fini(rdev); r100_init()
4094 radeon_ib_pool_fini(rdev); r100_init()
4095 radeon_irq_kms_fini(rdev); r100_init()
4096 if (rdev->flags & RADEON_IS_PCI) r100_init()
4097 r100_pci_gart_fini(rdev); r100_init()
4098 rdev->accel_working = false; r100_init()
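The error path of r100_init() above degrades gracefully rather than failing hard: if acceleration startup fails, the acceleration-related blocks (CP, writeback, IB pool, IRQs, GART) are torn down and accel_working is cleared, leaving unaccelerated modesetting usable. A sketch of that fallback shape, with stand-in callbacks:

#include <stdbool.h>

static bool accel_working;

static int init_accel(int (*startup)(void), void (*teardown)(void))
{
    accel_working = true;
    if (startup()) {
        teardown();            /* free CP, WB, IB pool, IRQs, GART */
        accel_working = false; /* fall back to unaccelerated path */
    }
    return 0;                  /* init itself still reports success */
}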
4103 uint32_t r100_mm_rreg_slow(struct radeon_device *rdev, uint32_t reg) r100_mm_rreg_slow() argument
4108 spin_lock_irqsave(&rdev->mmio_idx_lock, flags); r100_mm_rreg_slow()
4109 writel(reg, ((void __iomem *)rdev->rmmio) + RADEON_MM_INDEX); r100_mm_rreg_slow()
4110 ret = readl(((void __iomem *)rdev->rmmio) + RADEON_MM_DATA); r100_mm_rreg_slow()
4111 spin_unlock_irqrestore(&rdev->mmio_idx_lock, flags); r100_mm_rreg_slow()
4115 void r100_mm_wreg_slow(struct radeon_device *rdev, uint32_t reg, uint32_t v) r100_mm_wreg_slow() argument
4119 spin_lock_irqsave(&rdev->mmio_idx_lock, flags); r100_mm_wreg_slow()
4120 writel(reg, ((void __iomem *)rdev->rmmio) + RADEON_MM_INDEX); r100_mm_wreg_slow()
4121 writel(v, ((void __iomem *)rdev->rmmio) + RADEON_MM_DATA); r100_mm_wreg_slow()
4122 spin_unlock_irqrestore(&rdev->mmio_idx_lock, flags); r100_mm_wreg_slow()
4125 u32 r100_io_rreg(struct radeon_device *rdev, u32 reg) r100_io_rreg() argument
4127 if (reg < rdev->rio_mem_size) r100_io_rreg()
4128 return ioread32(rdev->rio_mem + reg); r100_io_rreg()
4130 iowrite32(reg, rdev->rio_mem + RADEON_MM_INDEX); r100_io_rreg()
4131 return ioread32(rdev->rio_mem + RADEON_MM_DATA); r100_io_rreg()
4135 void r100_io_wreg(struct radeon_device *rdev, u32 reg, u32 v) r100_io_wreg() argument
4137 if (reg < rdev->rio_mem_size) r100_io_wreg()
4138 iowrite32(v, rdev->rio_mem + reg); r100_io_wreg()
4140 iowrite32(reg, rdev->rio_mem + RADEON_MM_INDEX); r100_io_wreg()
4141 iowrite32(v, rdev->rio_mem + RADEON_MM_DATA); r100_io_wreg()
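Both accessor pairs above fall back to an index/data pair when the target offset lies outside the directly mapped window: the offset is written to RADEON_MM_INDEX and the payload then moves through RADEON_MM_DATA. The two-step sequence is not atomic, which is why the _slow MMIO variants take mmio_idx_lock around it. A user-space model of the mechanism (register names, window sizes, and offsets here are illustrative):

#include <stdint.h>
#include <stdio.h>

#define IDX 0                 /* stand-ins for RADEON_MM_INDEX/DATA */
#define DAT 1
#define DIRECT 16             /* size of the directly mapped window */

static uint32_t window[DIRECT];
static uint32_t indirect[256];    /* only reachable via IDX/DAT */

static void wreg(uint32_t reg, uint32_t v)
{
    if (reg < DIRECT && reg != IDX && reg != DAT) {
        window[reg] = v;          /* fast path: direct store */
        return;
    }
    window[IDX] = reg;            /* step 1: select target register */
    indirect[window[IDX] % 256] = v;  /* step 2: move data through port */
}

static uint32_t rreg(uint32_t reg)
{
    if (reg < DIRECT && reg != IDX && reg != DAT)
        return window[reg];
    window[IDX] = reg;
    return indirect[window[IDX] % 256];
}

int main(void)
{
    wreg(0x80, 0x12345678);
    printf("0x%08x\n", rreg(0x80));   /* prints 0x12345678 */
    return 0;
}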
H A Dsi_dpm.c1737 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
1738 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
1739 struct ni_power_info *ni_get_pi(struct radeon_device *rdev);
1742 extern int si_mc_load_microcode(struct radeon_device *rdev);
1743 extern void vce_v1_0_enable_mgcg(struct radeon_device *rdev, bool enable);
1745 static int si_populate_voltage_value(struct radeon_device *rdev,
1748 static int si_get_std_voltage_value(struct radeon_device *rdev,
1751 static int si_write_smc_soft_register(struct radeon_device *rdev,
1753 static int si_convert_power_level_to_smc(struct radeon_device *rdev,
1756 static int si_calculate_sclk_params(struct radeon_device *rdev,
1760 static void si_thermal_start_smc_fan_control(struct radeon_device *rdev);
1761 static void si_fan_ctrl_set_default_mode(struct radeon_device *rdev);
1763 static struct si_power_info *si_get_pi(struct radeon_device *rdev) si_get_pi() argument
1765 struct si_power_info *pi = rdev->pm.dpm.priv; si_get_pi()
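si_get_pi() above is the usual private-data accessor: the generic DPM core keeps an opaque pointer (rdev->pm.dpm.priv) and each ASIC backend casts it to its own power-info type. A minimal sketch of the idiom, with stand-in types:

struct dpm_core { void *priv; };
struct si_pi_model { int powertune_enabled; };   /* stand-in fields */

/* The core stores void *; the backend cast recovers the real type. */
static struct si_pi_model *get_pi(struct dpm_core *c)
{
    return (struct si_pi_model *)c->priv;
}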
1797 static void si_calculate_leakage_for_v_and_t(struct radeon_device *rdev, si_calculate_leakage_for_v_and_t() argument
1825 static void si_calculate_leakage_for_v(struct radeon_device *rdev, si_calculate_leakage_for_v() argument
1836 static void si_update_dte_from_pl2(struct radeon_device *rdev, si_update_dte_from_pl2() argument
1839 u32 p_limit1 = rdev->pm.dpm.tdp_limit; si_update_dte_from_pl2()
1840 u32 p_limit2 = rdev->pm.dpm.near_tdp_limit; si_update_dte_from_pl2()
1866 static void si_initialize_powertune_defaults(struct radeon_device *rdev) si_initialize_powertune_defaults() argument
1868 struct ni_power_info *ni_pi = ni_get_pi(rdev); si_initialize_powertune_defaults()
1869 struct si_power_info *si_pi = si_get_pi(rdev); si_initialize_powertune_defaults()
1872 if (rdev->family == CHIP_TAHITI) { si_initialize_powertune_defaults()
1879 switch (rdev->pdev->device) { si_initialize_powertune_defaults()
1906 } else if (rdev->family == CHIP_PITCAIRN) { si_initialize_powertune_defaults()
1907 switch (rdev->pdev->device) { si_initialize_powertune_defaults()
1943 } else if (rdev->family == CHIP_VERDE) { si_initialize_powertune_defaults()
1948 switch (rdev->pdev->device) { si_initialize_powertune_defaults()
1994 } else if (rdev->family == CHIP_OLAND) { si_initialize_powertune_defaults()
1995 switch (rdev->pdev->device) { si_initialize_powertune_defaults()
2044 } else if (rdev->family == CHIP_HAINAN) { si_initialize_powertune_defaults()
2067 si_update_dte_from_pl2(rdev, &si_pi->dte_data); si_initialize_powertune_defaults()
2091 static u32 si_get_smc_power_scaling_factor(struct radeon_device *rdev) si_get_smc_power_scaling_factor() argument
2096 static u32 si_calculate_cac_wintime(struct radeon_device *rdev) si_calculate_cac_wintime() argument
2103 xclk = radeon_get_xclk(rdev); si_calculate_cac_wintime()
2121 static int si_calculate_adjusted_tdp_limits(struct radeon_device *rdev, si_calculate_adjusted_tdp_limits() argument
2129 if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit) si_calculate_adjusted_tdp_limits()
2132 max_tdp_limit = ((100 + 100) * rdev->pm.dpm.tdp_limit) / 100; si_calculate_adjusted_tdp_limits()
2135 *tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100; si_calculate_adjusted_tdp_limits()
2136 *near_tdp_limit = rdev->pm.dpm.near_tdp_limit_adjusted + (*tdp_limit - rdev->pm.dpm.tdp_limit); si_calculate_adjusted_tdp_limits()
2138 *tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100; si_calculate_adjusted_tdp_limits()
2139 adjustment_delta = rdev->pm.dpm.tdp_limit - *tdp_limit; si_calculate_adjusted_tdp_limits()
2140 if (adjustment_delta < rdev->pm.dpm.near_tdp_limit_adjusted) si_calculate_adjusted_tdp_limits()
2141 *near_tdp_limit = rdev->pm.dpm.near_tdp_limit_adjusted - adjustment_delta; si_calculate_adjusted_tdp_limits()
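The limit math in si_calculate_adjusted_tdp_limits() is plain integer percent scaling: a positive tdp_adjustment scales the base limit up, capped at 2x by the (100 + 100) factor, and the near-TDP limit tracks the same delta; a negative adjustment subtracts the delta from the adjusted near limit instead. A worked example with assumed numbers:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* Assumed board values in the driver's abstract power units;
     * only the arithmetic mirrors the code above. */
    uint32_t tdp_limit = 180, near_tdp_adj = 160, adj = 10;

    uint32_t max_tdp = ((100 + 100) * tdp_limit) / 100;   /* 360: 2x cap */
    uint32_t up      = ((100 + adj) * tdp_limit) / 100;   /* 198 */
    uint32_t near_up = near_tdp_adj + (up - tdp_limit);   /* 178 */

    if (up > max_tdp)
        up = max_tdp;
    printf("tdp=%u near=%u\n", up, near_up);
    return 0;
}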
2154 static int si_populate_smc_tdp_limits(struct radeon_device *rdev, si_populate_smc_tdp_limits() argument
2157 struct ni_power_info *ni_pi = ni_get_pi(rdev); si_populate_smc_tdp_limits()
2158 struct si_power_info *si_pi = si_get_pi(rdev); si_populate_smc_tdp_limits()
2163 struct radeon_ppm_table *ppm = rdev->pm.dpm.dyn_state.ppm_table; si_populate_smc_tdp_limits()
2164 u32 scaling_factor = si_get_smc_power_scaling_factor(rdev); si_populate_smc_tdp_limits()
2174 ret = si_calculate_adjusted_tdp_limits(rdev, si_populate_smc_tdp_limits()
2176 rdev->pm.dpm.tdp_adjustment, si_populate_smc_tdp_limits()
2189 ret = si_copy_bytes_to_smc(rdev, si_populate_smc_tdp_limits()
2208 ret = si_copy_bytes_to_smc(rdev, si_pi->papm_cfg_table_start, si_populate_smc_tdp_limits()
2219 static int si_populate_smc_tdp_limits_2(struct radeon_device *rdev, si_populate_smc_tdp_limits_2() argument
2222 struct ni_power_info *ni_pi = ni_get_pi(rdev); si_populate_smc_tdp_limits_2()
2223 struct si_power_info *si_pi = si_get_pi(rdev); si_populate_smc_tdp_limits_2()
2227 u32 scaling_factor = si_get_smc_power_scaling_factor(rdev); si_populate_smc_tdp_limits_2()
2233 cpu_to_be32(si_scale_power_for_smc(rdev->pm.dpm.near_tdp_limit_adjusted, scaling_factor) * 1000); si_populate_smc_tdp_limits_2()
2235 cpu_to_be32(si_scale_power_for_smc((rdev->pm.dpm.near_tdp_limit_adjusted * SISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100, scaling_factor) * 1000); si_populate_smc_tdp_limits_2()
2237 ret = si_copy_bytes_to_smc(rdev, si_populate_smc_tdp_limits_2()
2251 static u16 si_calculate_power_efficiency_ratio(struct radeon_device *rdev, si_calculate_power_efficiency_ratio() argument
2273 static bool si_should_disable_uvd_powertune(struct radeon_device *rdev, si_should_disable_uvd_powertune() argument
2276 struct si_power_info *si_pi = si_get_pi(rdev); si_should_disable_uvd_powertune()
2285 static int si_populate_power_containment_values(struct radeon_device *rdev, si_populate_power_containment_values() argument
2289 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_populate_power_containment_values()
2290 struct ni_power_info *ni_pi = ni_get_pi(rdev); si_populate_power_containment_values()
2313 disable_uvd_power_tune = si_should_disable_uvd_powertune(rdev, radeon_state); si_populate_power_containment_values()
2348 ret = si_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table, si_populate_power_containment_values()
2353 ret = si_get_std_voltage_value(rdev, &vddc, &prev_std_vddc); si_populate_power_containment_values()
2357 ret = si_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table, si_populate_power_containment_values()
2362 ret = si_get_std_voltage_value(rdev, &vddc, &curr_std_vddc); si_populate_power_containment_values()
2366 pwr_efficiency_ratio = si_calculate_power_efficiency_ratio(rdev, si_populate_power_containment_values()
2379 static int si_populate_sq_ramping_values(struct radeon_device *rdev, si_populate_sq_ramping_values() argument
2383 struct ni_power_info *ni_pi = ni_get_pi(rdev); si_populate_sq_ramping_values()
2395 if (rdev->pm.dpm.sq_ramping_threshold == 0) si_populate_sq_ramping_values()
2417 if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) && si_populate_sq_ramping_values()
2436 static int si_enable_power_containment(struct radeon_device *rdev, si_enable_power_containment() argument
2440 struct ni_power_info *ni_pi = ni_get_pi(rdev); si_enable_power_containment()
2446 if (!si_should_disable_uvd_powertune(rdev, radeon_new_state)) { si_enable_power_containment()
2447 smc_result = si_send_msg_to_smc(rdev, PPSMC_TDPClampingActive); si_enable_power_containment()
2456 smc_result = si_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive); si_enable_power_containment()
2466 static int si_initialize_smc_dte_tables(struct radeon_device *rdev) si_initialize_smc_dte_tables() argument
2468 struct si_power_info *si_pi = si_get_pi(rdev); si_initialize_smc_dte_tables()
2524 ret = si_copy_bytes_to_smc(rdev, si_pi->dte_table_start, (u8 *)dte_tables, si_initialize_smc_dte_tables()
2531 static int si_get_cac_std_voltage_max_min(struct radeon_device *rdev, si_get_cac_std_voltage_max_min() argument
2534 struct si_power_info *si_pi = si_get_pi(rdev); si_get_cac_std_voltage_max_min()
2536 &rdev->pm.dpm.dyn_state.cac_leakage_table; si_get_cac_std_voltage_max_min()
2576 static int si_init_dte_leakage_table(struct radeon_device *rdev, si_init_dte_leakage_table() argument
2581 struct si_power_info *si_pi = si_get_pi(rdev); si_init_dte_leakage_table()
2589 scaling_factor = si_get_smc_power_scaling_factor(rdev); si_init_dte_leakage_table()
2597 si_calculate_leakage_for_v_and_t(rdev, si_init_dte_leakage_table()
2616 static int si_init_simplified_leakage_table(struct radeon_device *rdev, si_init_simplified_leakage_table() argument
2620 struct si_power_info *si_pi = si_get_pi(rdev); si_init_simplified_leakage_table()
2627 scaling_factor = si_get_smc_power_scaling_factor(rdev); si_init_simplified_leakage_table()
2632 si_calculate_leakage_for_v(rdev, si_init_simplified_leakage_table()
2651 static int si_initialize_smc_cac_tables(struct radeon_device *rdev) si_initialize_smc_cac_tables() argument
2653 struct ni_power_info *ni_pi = ni_get_pi(rdev); si_initialize_smc_cac_tables()
2654 struct si_power_info *si_pi = si_get_pi(rdev); si_initialize_smc_cac_tables()
2660 u32 ticks_per_us = radeon_get_xclk(rdev) / 100; si_initialize_smc_cac_tables()
2673 si_pi->dyn_powertune_data.cac_leakage = rdev->pm.dpm.cac_leakage; si_initialize_smc_cac_tables()
2676 si_pi->dyn_powertune_data.wintime = si_calculate_cac_wintime(rdev); si_initialize_smc_cac_tables()
2681 ret = si_get_cac_std_voltage_max_min(rdev, &vddc_max, &vddc_min); si_initialize_smc_cac_tables()
2691 ret = si_init_dte_leakage_table(rdev, cac_tables, si_initialize_smc_cac_tables()
2695 ret = si_init_simplified_leakage_table(rdev, cac_tables, si_initialize_smc_cac_tables()
2700 load_line_slope = ((u32)rdev->pm.dpm.load_line_slope << SMC_SISLANDS_SCALE_R) / 100; si_initialize_smc_cac_tables()
2716 ret = si_copy_bytes_to_smc(rdev, si_pi->cac_table_start, (u8 *)cac_tables, si_initialize_smc_cac_tables()
2722 ret = si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_ticks_per_us, ticks_per_us); si_initialize_smc_cac_tables()
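Throughout this file, table fields are byte-swapped with cpu_to_be16/cpu_to_be32 before si_copy_bytes_to_smc() pushes the struct into SMC SRAM, i.e. the SMC consumes big-endian data regardless of host endianness. A user-space equivalent of the packing step, using htonl/htons as stand-ins and an illustrative layout:

#include <stdint.h>
#include <arpa/inet.h>   /* htonl()/htons() produce big-endian */

struct smc_entry {           /* illustrative packed layout */
    uint32_t clk_be;
    uint16_t vddc_be;
    uint16_t pad;
};

static void pack_entry(struct smc_entry *e, uint32_t clk, uint16_t vddc)
{
    e->clk_be  = htonl(clk);    /* cpu_to_be32() equivalent */
    e->vddc_be = htons(vddc);   /* cpu_to_be16() equivalent */
    e->pad     = 0;
}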
2735 static int si_program_cac_config_registers(struct radeon_device *rdev, si_program_cac_config_registers() argument
2774 static int si_initialize_hardware_cac_manager(struct radeon_device *rdev) si_initialize_hardware_cac_manager() argument
2776 struct ni_power_info *ni_pi = ni_get_pi(rdev); si_initialize_hardware_cac_manager()
2777 struct si_power_info *si_pi = si_get_pi(rdev); si_initialize_hardware_cac_manager()
2784 ret = si_program_cac_config_registers(rdev, si_pi->lcac_config); si_initialize_hardware_cac_manager()
2787 ret = si_program_cac_config_registers(rdev, si_pi->cac_override); si_initialize_hardware_cac_manager()
2790 ret = si_program_cac_config_registers(rdev, si_pi->cac_weights); si_initialize_hardware_cac_manager()
2797 static int si_enable_smc_cac(struct radeon_device *rdev, si_enable_smc_cac() argument
2801 struct ni_power_info *ni_pi = ni_get_pi(rdev); si_enable_smc_cac()
2802 struct si_power_info *si_pi = si_get_pi(rdev); si_enable_smc_cac()
2808 if (!si_should_disable_uvd_powertune(rdev, radeon_new_state)) { si_enable_smc_cac()
2810 smc_result = si_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable); si_enable_smc_cac()
2815 smc_result = si_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac); si_enable_smc_cac()
2824 smc_result = si_send_msg_to_smc(rdev, PPSMC_MSG_EnableDTE); si_enable_smc_cac()
2831 smc_result = si_send_msg_to_smc(rdev, PPSMC_MSG_DisableDTE); si_enable_smc_cac()
2833 smc_result = si_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac); si_enable_smc_cac()
2838 smc_result = si_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable); si_enable_smc_cac()
2844 static int si_init_smc_spll_table(struct radeon_device *rdev) si_init_smc_spll_table() argument
2846 struct ni_power_info *ni_pi = ni_get_pi(rdev); si_init_smc_spll_table()
2847 struct si_power_info *si_pi = si_get_pi(rdev); si_init_smc_spll_table()
2865 ret = si_calculate_sclk_params(rdev, sclk, &sclk_params); si_init_smc_spll_table()
2903 ret = si_copy_bytes_to_smc(rdev, si_pi->spll_table_start, si_init_smc_spll_table()
2938 static u16 si_get_lower_of_leakage_and_vce_voltage(struct radeon_device *rdev, si_get_lower_of_leakage_and_vce_voltage() argument
2942 struct si_power_info *si_pi = si_get_pi(rdev); si_get_lower_of_leakage_and_vce_voltage()
2956 static int si_get_vce_clock_voltage(struct radeon_device *rdev, si_get_vce_clock_voltage() argument
2962 &rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table; si_get_vce_clock_voltage()
2983 *voltage = si_get_lower_of_leakage_and_vce_voltage(rdev, *voltage); si_get_vce_clock_voltage()
2988 static void si_apply_state_adjust_rules(struct radeon_device *rdev, si_apply_state_adjust_rules() argument
3004 if (rdev->pdev->vendor == p->chip_vendor && si_apply_state_adjust_rules()
3005 rdev->pdev->device == p->chip_device && si_apply_state_adjust_rules()
3006 rdev->pdev->subsystem_vendor == p->subsys_vendor && si_apply_state_adjust_rules()
3007 rdev->pdev->subsystem_device == p->subsys_device) { si_apply_state_adjust_rules()
3015 if (rdev->pdev->device == 0x6811 && si_apply_state_adjust_rules()
3016 rdev->pdev->revision == 0x81) si_apply_state_adjust_rules()
3020 rps->evclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].evclk; si_apply_state_adjust_rules()
3021 rps->ecclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].ecclk; si_apply_state_adjust_rules()
3022 si_get_vce_clock_voltage(rdev, rps->evclk, rps->ecclk, si_apply_state_adjust_rules()
3029 if ((rdev->pm.dpm.new_active_crtc_count > 1) || si_apply_state_adjust_rules()
3030 ni_dpm_vblank_too_short(rdev)) si_apply_state_adjust_rules()
3038 if (rdev->pm.dpm.ac_power) si_apply_state_adjust_rules()
3039 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac; si_apply_state_adjust_rules()
3041 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc; si_apply_state_adjust_rules()
3047 if (rdev->pm.dpm.ac_power == false) { si_apply_state_adjust_rules()
3061 btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk, si_apply_state_adjust_rules()
3063 btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk, si_apply_state_adjust_rules()
3065 btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk, si_apply_state_adjust_rules()
3110 if (sclk < rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].sclk) si_apply_state_adjust_rules()
3111 sclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].sclk; si_apply_state_adjust_rules()
3112 if (mclk < rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].mclk) si_apply_state_adjust_rules()
3113 mclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].mclk; si_apply_state_adjust_rules()
3161 btc_adjust_clock_combinations(rdev, max_limits, si_apply_state_adjust_rules()
3167 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk, si_apply_state_adjust_rules()
3170 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk, si_apply_state_adjust_rules()
3173 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk, si_apply_state_adjust_rules()
3176 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk, si_apply_state_adjust_rules()
3177 rdev->clock.current_dispclk, si_apply_state_adjust_rules()
3182 btc_apply_voltage_delta_rules(rdev, si_apply_state_adjust_rules()
3190 if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc) si_apply_state_adjust_rules()
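After choosing the AC or DC limit set, si_apply_state_adjust_rules() caps every performance level's clocks and voltages before the voltage-dependency tables are applied. The core clamp reduces to the following, with stand-in types covering only the fields visible above:

#include <stdint.h>

struct caps { uint32_t sclk, mclk; uint16_t vddc, vddci; };

/* Cap one performance level against the active limit set. */
static void clamp_level(struct caps *lvl, const struct caps *max)
{
    if (lvl->sclk  > max->sclk)  lvl->sclk  = max->sclk;
    if (lvl->mclk  > max->mclk)  lvl->mclk  = max->mclk;
    if (lvl->vddc  > max->vddc)  lvl->vddc  = max->vddc;
    if (lvl->vddci > max->vddci) lvl->vddci = max->vddci;
}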
3196 static int si_read_smc_soft_register(struct radeon_device *rdev,
3199 struct si_power_info *si_pi = si_get_pi(rdev);
3201 return si_read_smc_sram_dword(rdev,
3207 static int si_write_smc_soft_register(struct radeon_device *rdev, si_write_smc_soft_register() argument
3210 struct si_power_info *si_pi = si_get_pi(rdev); si_write_smc_soft_register()
3212 return si_write_smc_sram_dword(rdev, si_write_smc_soft_register()
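The soft registers read and written above are, in effect, a dword array in SMC SRAM based at the firmware-reported soft-register start; accesses just add the register's byte offset to that base. A trivial model (the base offset below is assumed, not the real one):

#include <stdint.h>

static uint32_t sram[4096];
enum { SOFT_REGS_START = 0x200 };      /* assumed byte offset */

static void write_soft_reg(uint32_t reg_offset, uint32_t value)
{
    sram[(SOFT_REGS_START + reg_offset) / 4] = value;
}

static uint32_t read_soft_reg(uint32_t reg_offset)
{
    return sram[(SOFT_REGS_START + reg_offset) / 4];
}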
3217 static bool si_is_special_1gb_platform(struct radeon_device *rdev) si_is_special_1gb_platform() argument
3238 if ((rdev->pdev->device == 0x6819) && si_is_special_1gb_platform()
3245 static void si_get_leakage_vddc(struct radeon_device *rdev) si_get_leakage_vddc() argument
3247 struct si_power_info *si_pi = si_get_pi(rdev); si_get_leakage_vddc()
3252 ret = radeon_atom_get_leakage_vddc_based_on_leakage_idx(rdev, &vddc, SISLANDS_LEAKAGE_INDEX0 + i); si_get_leakage_vddc()
3264 static int si_get_leakage_voltage_from_leakage_index(struct radeon_device *rdev, si_get_leakage_voltage_from_leakage_index() argument
3267 struct si_power_info *si_pi = si_get_pi(rdev); si_get_leakage_voltage_from_leakage_index()
3291 static void si_set_dpm_event_sources(struct radeon_device *rdev, u32 sources) si_set_dpm_event_sources() argument
3293 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_set_dpm_event_sources()
3326 static void si_enable_auto_throttle_source(struct radeon_device *rdev, si_enable_auto_throttle_source() argument
3330 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_enable_auto_throttle_source()
3335 si_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources); si_enable_auto_throttle_source()
3340 si_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources); si_enable_auto_throttle_source()
3345 static void si_start_dpm(struct radeon_device *rdev) si_start_dpm() argument
3350 static void si_stop_dpm(struct radeon_device *rdev) si_stop_dpm() argument
3355 static void si_enable_sclk_control(struct radeon_device *rdev, bool enable) si_enable_sclk_control() argument
3365 static int si_notify_hardware_of_thermal_state(struct radeon_device *rdev,
3371 ret = si_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);
3380 static void si_notify_hardware_vpu_recovery_event(struct radeon_device *rdev)
3382 si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_tdr_is_about_to_happen, true);
3387 static int si_notify_hw_of_powersource(struct radeon_device *rdev, bool ac_power)
3390 return (si_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
3397 static PPSMC_Result si_send_msg_to_smc_with_parameter(struct radeon_device *rdev, si_send_msg_to_smc_with_parameter() argument
3401 return si_send_msg_to_smc(rdev, msg); si_send_msg_to_smc_with_parameter()
3404 static int si_restrict_performance_levels_before_switch(struct radeon_device *rdev) si_restrict_performance_levels_before_switch() argument
3406 if (si_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK) si_restrict_performance_levels_before_switch()
3409 return (si_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ? si_restrict_performance_levels_before_switch()
3413 int si_dpm_force_performance_level(struct radeon_device *rdev, si_dpm_force_performance_level() argument
3416 struct radeon_ps *rps = rdev->pm.dpm.current_ps; si_dpm_force_performance_level()
3421 if (si_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, levels) != PPSMC_Result_OK) si_dpm_force_performance_level()
3424 if (si_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 1) != PPSMC_Result_OK) si_dpm_force_performance_level()
3427 if (si_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK) si_dpm_force_performance_level()
3430 if (si_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) != PPSMC_Result_OK) si_dpm_force_performance_level()
3433 if (si_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK) si_dpm_force_performance_level()
3436 if (si_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, levels) != PPSMC_Result_OK) si_dpm_force_performance_level()
3440 rdev->pm.dpm.forced_level = level; si_dpm_force_performance_level()
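si_dpm_force_performance_level() reduces to which (message, argument) pairs are sent to the SMC for each requested level, as the listing order above suggests. A sketch of that pattern; smc_send() is a stand-in for si_send_msg_to_smc_with_parameter(), and the message codes are hypothetical:

#include <stdbool.h>

enum { MSG_SET_ENABLED_LEVELS, MSG_SET_FORCED_LEVELS };
enum level { FORCE_AUTO, FORCE_LOW, FORCE_HIGH };

static bool smc_send(int msg, unsigned arg)   /* mailbox stand-in */
{
    (void)msg; (void)arg;
    return true;
}

static bool force_level(enum level lvl, unsigned levels)
{
    switch (lvl) {
    case FORCE_HIGH:  /* expose all levels, then pin one forced level */
        return smc_send(MSG_SET_ENABLED_LEVELS, levels) &&
               smc_send(MSG_SET_FORCED_LEVELS, 1);
    case FORCE_LOW:   /* unpin, then restrict to a single level */
        return smc_send(MSG_SET_FORCED_LEVELS, 0) &&
               smc_send(MSG_SET_ENABLED_LEVELS, 1);
    default:          /* auto: unpin and expose everything */
        return smc_send(MSG_SET_FORCED_LEVELS, 0) &&
               smc_send(MSG_SET_ENABLED_LEVELS, levels);
    }
}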
3446 static int si_set_boot_state(struct radeon_device *rdev)
3448 return (si_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToInitialState) == PPSMC_Result_OK) ?
3453 static int si_set_sw_state(struct radeon_device *rdev) si_set_sw_state() argument
3455 return (si_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToSwState) == PPSMC_Result_OK) ? si_set_sw_state()
3459 static int si_halt_smc(struct radeon_device *rdev) si_halt_smc() argument
3461 if (si_send_msg_to_smc(rdev, PPSMC_MSG_Halt) != PPSMC_Result_OK) si_halt_smc()
3464 return (si_wait_for_smc_inactive(rdev) == PPSMC_Result_OK) ? si_halt_smc()
3468 static int si_resume_smc(struct radeon_device *rdev) si_resume_smc() argument
3470 if (si_send_msg_to_smc(rdev, PPSMC_FlushDataCache) != PPSMC_Result_OK) si_resume_smc()
3473 return (si_send_msg_to_smc(rdev, PPSMC_MSG_Resume) == PPSMC_Result_OK) ? si_resume_smc()
3477 static void si_dpm_start_smc(struct radeon_device *rdev) si_dpm_start_smc() argument
3479 si_program_jump_on_start(rdev); si_dpm_start_smc()
3480 si_start_smc(rdev); si_dpm_start_smc()
3481 si_start_smc_clock(rdev); si_dpm_start_smc()
3484 static void si_dpm_stop_smc(struct radeon_device *rdev) si_dpm_stop_smc() argument
3486 si_reset_smc(rdev); si_dpm_stop_smc()
3487 si_stop_smc_clock(rdev); si_dpm_stop_smc()
3490 static int si_process_firmware_header(struct radeon_device *rdev) si_process_firmware_header() argument
3492 struct si_power_info *si_pi = si_get_pi(rdev); si_process_firmware_header()
3496 ret = si_read_smc_sram_dword(rdev, si_process_firmware_header()
3505 ret = si_read_smc_sram_dword(rdev, si_process_firmware_header()
3514 ret = si_read_smc_sram_dword(rdev, si_process_firmware_header()
3523 ret = si_read_smc_sram_dword(rdev, si_process_firmware_header()
3532 ret = si_read_smc_sram_dword(rdev, si_process_firmware_header()
3541 ret = si_read_smc_sram_dword(rdev, si_process_firmware_header()
3550 ret = si_read_smc_sram_dword(rdev, si_process_firmware_header()
3559 ret = si_read_smc_sram_dword(rdev, si_process_firmware_header()
3568 ret = si_read_smc_sram_dword(rdev, si_process_firmware_header()
3580 static void si_read_clock_registers(struct radeon_device *rdev) si_read_clock_registers() argument
3582 struct si_power_info *si_pi = si_get_pi(rdev); si_read_clock_registers()
3601 static void si_enable_thermal_protection(struct radeon_device *rdev, si_enable_thermal_protection() argument
3610 static void si_enable_acpi_power_management(struct radeon_device *rdev) si_enable_acpi_power_management() argument
3616 static int si_enter_ulp_state(struct radeon_device *rdev)
3625 static int si_exit_ulp_state(struct radeon_device *rdev)
3633 for (i = 0; i < rdev->usec_timeout; i++) {
3643 static int si_notify_smc_display_change(struct radeon_device *rdev, si_notify_smc_display_change() argument
3649 return (si_send_msg_to_smc(rdev, msg) == PPSMC_Result_OK) ? si_notify_smc_display_change()
3653 static void si_program_response_times(struct radeon_device *rdev) si_program_response_times() argument
3659 si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_mvdd_chg_time, 1); si_program_response_times()
3661 voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time; si_program_response_times()
3662 backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time; si_program_response_times()
3670 reference_clock = radeon_get_xclk(rdev); si_program_response_times()
3676 si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_delay_vreg, vddc_dly); si_program_response_times()
3677 si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_delay_acpi, acpi_dly); si_program_response_times()
3678 si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly); si_program_response_times()
3679 si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA); si_program_response_times()
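si_program_response_times() converts regulator settle times into delay values derived from the reference clock. Elsewhere in this file, si_initialize_smc_cac_tables() computes ticks_per_us = radeon_get_xclk(rdev) / 100, which implies xclk is reported in 10 kHz units. The conversion then looks like:

#include <stdint.h>

/* xclk / 100 is ticks per microsecond when xclk is in 10 kHz units
 * (derived from the ticks_per_us computation in this file). */
static uint32_t us_to_xclk_ticks(uint32_t us, uint32_t xclk_10khz)
{
    return us * (xclk_10khz / 100);
}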
3682 static void si_program_ds_registers(struct radeon_device *rdev) si_program_ds_registers() argument
3684 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_program_ds_registers()
3694 static void si_program_display_gap(struct radeon_device *rdev) si_program_display_gap() argument
3700 if (rdev->pm.dpm.new_active_crtc_count > 0) si_program_display_gap()
3705 if (rdev->pm.dpm.new_active_crtc_count > 1) si_program_display_gap()
3715 if ((rdev->pm.dpm.new_active_crtc_count > 0) && si_program_display_gap()
3716 (!(rdev->pm.dpm.new_active_crtcs & (1 << pipe)))) { si_program_display_gap()
3718 for (i = 0; i < rdev->num_crtc; i++) { si_program_display_gap()
3719 if (rdev->pm.dpm.new_active_crtcs & (1 << i)) si_program_display_gap()
3722 if (i == rdev->num_crtc) si_program_display_gap()
3736 si_notify_smc_display_change(rdev, rdev->pm.dpm.new_active_crtc_count > 0); si_program_display_gap()
3739 static void si_enable_spread_spectrum(struct radeon_device *rdev, bool enable) si_enable_spread_spectrum() argument
3741 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_enable_spread_spectrum()
3752 static void si_setup_bsp(struct radeon_device *rdev) si_setup_bsp() argument
3754 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_setup_bsp()
3755 u32 xclk = radeon_get_xclk(rdev); si_setup_bsp()
3776 static void si_program_git(struct radeon_device *rdev) si_program_git() argument
3781 static void si_program_tp(struct radeon_device *rdev) si_program_tp() argument
3801 static void si_program_tpp(struct radeon_device *rdev) si_program_tpp() argument
3806 static void si_program_sstp(struct radeon_device *rdev) si_program_sstp() argument
3811 static void si_enable_display_gap(struct radeon_device *rdev) si_enable_display_gap() argument
3825 static void si_program_vc(struct radeon_device *rdev) si_program_vc() argument
3827 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_program_vc()
3832 static void si_clear_vc(struct radeon_device *rdev) si_clear_vc() argument
3872 static u8 si_get_strobe_mode_settings(struct radeon_device *rdev, u32 mclk) si_get_strobe_mode_settings() argument
3874 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_get_strobe_mode_settings()
3892 static int si_upload_firmware(struct radeon_device *rdev) si_upload_firmware() argument
3894 struct si_power_info *si_pi = si_get_pi(rdev); si_upload_firmware()
3897 si_reset_smc(rdev); si_upload_firmware()
3898 si_stop_smc_clock(rdev); si_upload_firmware()
3900 ret = si_load_smc_ucode(rdev, si_pi->sram_end); si_upload_firmware()
3905 static bool si_validate_phase_shedding_tables(struct radeon_device *rdev, si_validate_phase_shedding_tables() argument
3932 void si_trim_voltage_table_to_fit_state_table(struct radeon_device *rdev, si_trim_voltage_table_to_fit_state_table() argument
3949 static int si_get_svi2_voltage_table(struct radeon_device *rdev, si_get_svi2_voltage_table() argument
3970 static int si_construct_voltage_tables(struct radeon_device *rdev) si_construct_voltage_tables() argument
3972 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_construct_voltage_tables()
3973 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_construct_voltage_tables()
3974 struct si_power_info *si_pi = si_get_pi(rdev); si_construct_voltage_tables()
3978 ret = radeon_atom_get_voltage_table(rdev, VOLTAGE_TYPE_VDDC, si_construct_voltage_tables()
3984 si_trim_voltage_table_to_fit_state_table(rdev, si_construct_voltage_tables()
3988 ret = si_get_svi2_voltage_table(rdev, si_construct_voltage_tables()
3989 &rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk, si_construct_voltage_tables()
3998 ret = radeon_atom_get_voltage_table(rdev, VOLTAGE_TYPE_VDDCI, si_construct_voltage_tables()
4004 si_trim_voltage_table_to_fit_state_table(rdev, si_construct_voltage_tables()
4009 ret = si_get_svi2_voltage_table(rdev, si_construct_voltage_tables()
4010 &rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk, si_construct_voltage_tables()
4017 ret = radeon_atom_get_voltage_table(rdev, VOLTAGE_TYPE_MVDDC, si_construct_voltage_tables()
4031 si_trim_voltage_table_to_fit_state_table(rdev, si_construct_voltage_tables()
4037 ret = radeon_atom_get_voltage_table(rdev, VOLTAGE_TYPE_VDDC, si_construct_voltage_tables()
4050 static void si_populate_smc_voltage_table(struct radeon_device *rdev, si_populate_smc_voltage_table() argument
4060 static int si_populate_smc_voltage_tables(struct radeon_device *rdev, si_populate_smc_voltage_tables() argument
4063 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_populate_smc_voltage_tables()
4064 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_populate_smc_voltage_tables()
4065 struct si_power_info *si_pi = si_get_pi(rdev); si_populate_smc_voltage_tables()
4069 si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_svi_rework_gpio_id_svc, si_populate_smc_voltage_tables()
4071 si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_svi_rework_gpio_id_svd, si_populate_smc_voltage_tables()
4073 si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_svi_rework_plat_type, si_populate_smc_voltage_tables()
4077 si_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table); si_populate_smc_voltage_tables()
4090 si_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table); si_populate_smc_voltage_tables()
4098 si_populate_smc_voltage_table(rdev, &si_pi->mvdd_voltage_table, table); si_populate_smc_voltage_tables()
4105 if (si_validate_phase_shedding_tables(rdev, &si_pi->vddc_phase_shed_table, si_populate_smc_voltage_tables()
4106 &rdev->pm.dpm.dyn_state.phase_shedding_limits_table)) { si_populate_smc_voltage_tables()
4107 si_populate_smc_voltage_table(rdev, &si_pi->vddc_phase_shed_table, table); si_populate_smc_voltage_tables()
4112 si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_phase_shedding_delay, si_populate_smc_voltage_tables()
4123 static int si_populate_voltage_value(struct radeon_device *rdev, si_populate_voltage_value() argument
4143 static int si_populate_mvdd_value(struct radeon_device *rdev, u32 mclk, si_populate_mvdd_value() argument
4146 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_populate_mvdd_value()
4147 struct si_power_info *si_pi = si_get_pi(rdev); si_populate_mvdd_value()
4160 static int si_get_std_voltage_value(struct radeon_device *rdev, si_get_std_voltage_value() argument
4168 if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries) { si_get_std_voltage_value()
4169 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_NEW_CAC_VOLTAGE) { si_get_std_voltage_value()
4170 if (rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries == NULL) si_get_std_voltage_value()
4173 for (v_index = 0; (u32)v_index < rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.count; v_index++) { si_get_std_voltage_value()
4175 (u16)rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[v_index].v) { si_get_std_voltage_value()
4177 if ((u32)v_index < rdev->pm.dpm.dyn_state.cac_leakage_table.count) si_get_std_voltage_value()
4179 rdev->pm.dpm.dyn_state.cac_leakage_table.entries[v_index].vddc; si_get_std_voltage_value()
4182 rdev->pm.dpm.dyn_state.cac_leakage_table.entries[rdev->pm.dpm.dyn_state.cac_leakage_table.count-1].vddc; si_get_std_voltage_value()
4188 for (v_index = 0; (u32)v_index < rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.count; v_index++) { si_get_std_voltage_value()
4190 (u16)rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[v_index].v) { si_get_std_voltage_value()
4192 if ((u32)v_index < rdev->pm.dpm.dyn_state.cac_leakage_table.count) si_get_std_voltage_value()
4194 rdev->pm.dpm.dyn_state.cac_leakage_table.entries[v_index].vddc; si_get_std_voltage_value()
4197 rdev->pm.dpm.dyn_state.cac_leakage_table.entries[rdev->pm.dpm.dyn_state.cac_leakage_table.count-1].vddc; si_get_std_voltage_value()
4203 if ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count) si_get_std_voltage_value()
4204 *std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc; si_get_std_voltage_value()
4211 static int si_populate_std_voltage_value(struct radeon_device *rdev, si_populate_std_voltage_value() argument
4221 static int si_populate_phase_shedding_value(struct radeon_device *rdev, si_populate_phase_shedding_value() argument
4240 static int si_init_arb_table_index(struct radeon_device *rdev) si_init_arb_table_index() argument
4242 struct si_power_info *si_pi = si_get_pi(rdev); si_init_arb_table_index()
4246 ret = si_read_smc_sram_dword(rdev, si_pi->arb_table_start, &tmp, si_pi->sram_end); si_init_arb_table_index()
4253 return si_write_smc_sram_dword(rdev, si_pi->arb_table_start, tmp, si_pi->sram_end); si_init_arb_table_index()
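si_init_arb_table_index() is a read-modify-write of a single SRAM dword: read it back, splice an arb-set index into one byte, and write it out. The excerpt hides the middle lines, so the exact field and index value are not visible here; the following only shows the shape of such an update:

#include <stdint.h>

/* Replace the top byte of a 32-bit word with a new index value. */
static uint32_t splice_top_byte(uint32_t dword, uint8_t index)
{
    dword &= 0x00ffffffu;            /* clear the byte being replaced */
    dword |= (uint32_t)index << 24;  /* insert the new index */
    return dword;
}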
4256 static int si_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev) si_initial_switch_from_arb_f0_to_f1() argument
4258 return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1); si_initial_switch_from_arb_f0_to_f1()
4261 static int si_reset_to_default(struct radeon_device *rdev) si_reset_to_default() argument
4263 return (si_send_msg_to_smc(rdev, PPSMC_MSG_ResetToDefaults) == PPSMC_Result_OK) ? si_reset_to_default()
4267 static int si_force_switch_to_arb_f0(struct radeon_device *rdev) si_force_switch_to_arb_f0() argument
4269 struct si_power_info *si_pi = si_get_pi(rdev); si_force_switch_to_arb_f0()
4273 ret = si_read_smc_sram_dword(rdev, si_pi->arb_table_start, si_force_switch_to_arb_f0()
4283 return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0); si_force_switch_to_arb_f0()
4286 static u32 si_calculate_memory_refresh_rate(struct radeon_device *rdev, si_calculate_memory_refresh_rate() argument
4305 static int si_populate_memory_timing_parameters(struct radeon_device *rdev, si_populate_memory_timing_parameters() argument
4314 (u8)si_calculate_memory_refresh_rate(rdev, pl->sclk); si_populate_memory_timing_parameters()
4316 radeon_atom_set_engine_dram_timings(rdev, si_populate_memory_timing_parameters()
4331 static int si_do_program_memory_timing_parameters(struct radeon_device *rdev, si_do_program_memory_timing_parameters() argument
4335 struct si_power_info *si_pi = si_get_pi(rdev); si_do_program_memory_timing_parameters()
4341 ret = si_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs); si_do_program_memory_timing_parameters()
4344 ret = si_copy_bytes_to_smc(rdev, si_do_program_memory_timing_parameters()
4358 static int si_program_memory_timing_parameters(struct radeon_device *rdev, si_program_memory_timing_parameters() argument
4361 return si_do_program_memory_timing_parameters(rdev, radeon_new_state, si_program_memory_timing_parameters()
4365 static int si_populate_initial_mvdd_value(struct radeon_device *rdev, si_populate_initial_mvdd_value() argument
4368 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_populate_initial_mvdd_value()
4369 struct si_power_info *si_pi = si_get_pi(rdev); si_populate_initial_mvdd_value()
4372 return si_populate_voltage_value(rdev, &si_pi->mvdd_voltage_table, si_populate_initial_mvdd_value()
4378 static int si_populate_smc_initial_state(struct radeon_device *rdev, si_populate_smc_initial_state() argument
4383 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_populate_smc_initial_state()
4384 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_populate_smc_initial_state()
4385 struct si_power_info *si_pi = si_get_pi(rdev); si_populate_smc_initial_state()
4432 ret = si_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table, si_populate_smc_initial_state()
4439 ret = si_get_std_voltage_value(rdev, si_populate_smc_initial_state()
4443 si_populate_std_voltage_value(rdev, std_vddc, si_populate_smc_initial_state()
4449 si_populate_voltage_value(rdev, si_populate_smc_initial_state()
4455 si_populate_phase_shedding_value(rdev, si_populate_smc_initial_state()
4456 &rdev->pm.dpm.dyn_state.phase_shedding_limits_table, si_populate_smc_initial_state()
4462 si_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd); si_populate_smc_initial_state()
4473 si_get_strobe_mode_settings(rdev, si_populate_smc_initial_state()
4501 static int si_populate_smc_acpi_state(struct radeon_device *rdev, si_populate_smc_acpi_state() argument
4504 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_populate_smc_acpi_state()
4505 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_populate_smc_acpi_state()
4506 struct si_power_info *si_pi = si_get_pi(rdev); si_populate_smc_acpi_state()
4526 ret = si_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table, si_populate_smc_acpi_state()
4531 ret = si_get_std_voltage_value(rdev, si_populate_smc_acpi_state()
4534 si_populate_std_voltage_value(rdev, std_vddc, si_populate_smc_acpi_state()
4541 si_populate_phase_shedding_value(rdev, si_populate_smc_acpi_state()
4542 &rdev->pm.dpm.dyn_state.phase_shedding_limits_table, si_populate_smc_acpi_state()
4549 ret = si_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table, si_populate_smc_acpi_state()
4554 ret = si_get_std_voltage_value(rdev, si_populate_smc_acpi_state()
4558 si_populate_std_voltage_value(rdev, std_vddc, si_populate_smc_acpi_state()
4562 table->ACPIState.levels[0].gen2PCIE = (u8)r600_get_pcie_gen_support(rdev, si_populate_smc_acpi_state()
4568 si_populate_phase_shedding_value(rdev, si_populate_smc_acpi_state()
4569 &rdev->pm.dpm.dyn_state.phase_shedding_limits_table, si_populate_smc_acpi_state()
4578 si_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table, si_populate_smc_acpi_state()
4622 si_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd); si_populate_smc_acpi_state()
4642 static int si_populate_ulv_state(struct radeon_device *rdev, si_populate_ulv_state() argument
4645 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_populate_ulv_state()
4646 struct si_power_info *si_pi = si_get_pi(rdev); si_populate_ulv_state()
4651 ret = si_convert_power_level_to_smc(rdev, &ulv->pl, si_populate_ulv_state()
4673 static int si_program_ulv_memory_timing_parameters(struct radeon_device *rdev) si_program_ulv_memory_timing_parameters() argument
4675 struct si_power_info *si_pi = si_get_pi(rdev); si_program_ulv_memory_timing_parameters()
4680 ret = si_populate_memory_timing_parameters(rdev, &ulv->pl, si_program_ulv_memory_timing_parameters()
4685 si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_ulv_volt_change_delay, si_program_ulv_memory_timing_parameters()
4688 ret = si_copy_bytes_to_smc(rdev, si_program_ulv_memory_timing_parameters()
4699 static void si_get_mvdd_configuration(struct radeon_device *rdev) si_get_mvdd_configuration() argument
4701 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_get_mvdd_configuration()
4706 static int si_init_smc_table(struct radeon_device *rdev) si_init_smc_table() argument
4708 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_init_smc_table()
4709 struct si_power_info *si_pi = si_get_pi(rdev); si_init_smc_table()
4710 struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps; si_init_smc_table()
4717 si_populate_smc_voltage_tables(rdev, table); si_init_smc_table()
4719 switch (rdev->pm.int_thermal_type) { si_init_smc_table()
4732 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC) si_init_smc_table()
4735 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT) { si_init_smc_table()
4736 if ((rdev->pdev->device != 0x6818) && (rdev->pdev->device != 0x6819)) si_init_smc_table()
4740 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC) si_init_smc_table()
4746 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REVERT_GPIO5_POLARITY) si_init_smc_table()
4749 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_VRHOT_GPIO_CONFIGURABLE) { si_init_smc_table()
4751 vr_hot_gpio = rdev->pm.dpm.backbias_response_time; si_init_smc_table()
4752 si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_vr_hot_gpio, si_init_smc_table()
4756 ret = si_populate_smc_initial_state(rdev, radeon_boot_state, table); si_init_smc_table()
4760 ret = si_populate_smc_acpi_state(rdev, table); si_init_smc_table()
4766 ret = si_do_program_memory_timing_parameters(rdev, radeon_boot_state, si_init_smc_table()
4772 ret = si_populate_ulv_state(rdev, &table->ULVState); si_init_smc_table()
4776 ret = si_program_ulv_memory_timing_parameters(rdev); si_init_smc_table()
4783 lane_width = radeon_get_pcie_lanes(rdev); si_init_smc_table()
4784 si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_non_ulv_pcie_link_width, lane_width); si_init_smc_table()
4789 return si_copy_bytes_to_smc(rdev, si_pi->state_table_start, si_init_smc_table()
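si_init_smc_table() assembles one in-memory state table (voltage tables, thermal controller type, platform flags, the boot "initial" state, the ACPI state, the ULV state, memory-timing defaults) and then uploads the whole struct with a single si_copy_bytes_to_smc() call. The populate-then-upload pattern, with stand-in types; copy_to_smc() is a placeholder for si_copy_bytes_to_smc():

#include <stdint.h>
#include <string.h>

struct smc_state_table_model { uint8_t bytes[128]; };

static int copy_to_smc(uint32_t smc_addr, const uint8_t *src, size_t n)
{
    (void)smc_addr; (void)src; (void)n;   /* model: pretend success */
    return 0;
}

static int upload_state_table(uint32_t table_start)
{
    struct smc_state_table_model t;

    memset(&t, 0, sizeof(t));   /* ... populate boot/ACPI/ULV fields ... */
    return copy_to_smc(table_start, (const uint8_t *)&t, sizeof(t));
}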
4794 static int si_calculate_sclk_params(struct radeon_device *rdev, si_calculate_sclk_params() argument
4798 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_calculate_sclk_params()
4799 struct si_power_info *si_pi = si_get_pi(rdev); si_calculate_sclk_params()
4808 u32 reference_clock = rdev->clock.spll.reference_freq; si_calculate_sclk_params()
4813 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, si_calculate_sclk_params()
4839 if (radeon_atombios_get_asic_ss_info(rdev, &ss, si_calculate_sclk_params()
4864 static int si_populate_sclk_value(struct radeon_device *rdev, si_populate_sclk_value() argument
4871 ret = si_calculate_sclk_params(rdev, engine_clock, &sclk_tmp); si_populate_sclk_value()
4885 static int si_populate_mclk_value(struct radeon_device *rdev, si_populate_mclk_value() argument
4892 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_populate_mclk_value()
4893 struct si_power_info *si_pi = si_get_pi(rdev); si_populate_mclk_value()
4906 ret = radeon_atom_get_memory_pll_dividers(rdev, memory_clock, strobe_mode, &mpll_param); si_populate_mclk_value()
4930 u32 reference_clock = rdev->clock.mpll.reference_freq; si_populate_mclk_value()
4939 if (radeon_atombios_get_asic_ss_info(rdev, &ss, si_populate_mclk_value()
4974 static void si_populate_smc_sp(struct radeon_device *rdev, si_populate_smc_sp() argument
4979 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_populate_smc_sp()
4989 static int si_convert_power_level_to_smc(struct radeon_device *rdev, si_convert_power_level_to_smc() argument
4993 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_convert_power_level_to_smc()
4994 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_convert_power_level_to_smc()
4995 struct si_power_info *si_pi = si_get_pi(rdev); si_convert_power_level_to_smc()
5007 ret = si_populate_sclk_value(rdev, pl->sclk, &level->sclk); si_convert_power_level_to_smc()
5017 (rdev->pm.dpm.new_active_crtc_count <= 2)) { si_convert_power_level_to_smc()
5031 level->strobeMode = si_get_strobe_mode_settings(rdev, pl->mclk); si_convert_power_level_to_smc()
5043 level->strobeMode = si_get_strobe_mode_settings(rdev, si_convert_power_level_to_smc()
5049 ret = si_populate_mclk_value(rdev, si_convert_power_level_to_smc()
5057 ret = si_populate_voltage_value(rdev, si_convert_power_level_to_smc()
5064 ret = si_get_std_voltage_value(rdev, &level->vddc, &std_vddc); si_convert_power_level_to_smc()
5068 ret = si_populate_std_voltage_value(rdev, std_vddc, si_convert_power_level_to_smc()
5074 ret = si_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table, si_convert_power_level_to_smc()
5081 ret = si_populate_phase_shedding_value(rdev, si_convert_power_level_to_smc()
5082 &rdev->pm.dpm.dyn_state.phase_shedding_limits_table, si_convert_power_level_to_smc()
5093 ret = si_populate_mvdd_value(rdev, pl->mclk, &level->mvdd); si_convert_power_level_to_smc()
5098 static int si_populate_smc_t(struct radeon_device *rdev, si_populate_smc_t() argument
5102 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_populate_smc_t()
5147 static int si_disable_ulv(struct radeon_device *rdev) si_disable_ulv() argument
5149 struct si_power_info *si_pi = si_get_pi(rdev); si_disable_ulv()
5153 return (si_send_msg_to_smc(rdev, PPSMC_MSG_DisableULV) == PPSMC_Result_OK) ? si_disable_ulv()
5159 static bool si_is_state_ulv_compatible(struct radeon_device *rdev, si_is_state_ulv_compatible() argument
5162 const struct si_power_info *si_pi = si_get_pi(rdev); si_is_state_ulv_compatible()
5172 for (i = 0; i < rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count; i++) { si_is_state_ulv_compatible()
5173 if (rdev->clock.current_dispclk <= si_is_state_ulv_compatible()
5174 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[i].clk) { si_is_state_ulv_compatible()
5176 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[i].v) si_is_state_ulv_compatible()
5187 static int si_set_power_state_conditionally_enable_ulv(struct radeon_device *rdev, si_set_power_state_conditionally_enable_ulv() argument
5190 const struct si_power_info *si_pi = si_get_pi(rdev); si_set_power_state_conditionally_enable_ulv()
5194 if (si_is_state_ulv_compatible(rdev, radeon_new_state)) si_set_power_state_conditionally_enable_ulv()
5195 return (si_send_msg_to_smc(rdev, PPSMC_MSG_EnableULV) == PPSMC_Result_OK) ? si_set_power_state_conditionally_enable_ulv()
5201 static int si_convert_power_state_to_smc(struct radeon_device *rdev, si_convert_power_state_to_smc() argument
5205 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_convert_power_state_to_smc()
5206 struct ni_power_info *ni_pi = ni_get_pi(rdev); si_convert_power_state_to_smc()
5207 struct si_power_info *si_pi = si_get_pi(rdev); si_convert_power_state_to_smc()
5240 ret = si_convert_power_level_to_smc(rdev, &state->performance_levels[i], si_convert_power_state_to_smc()
5264 si_write_smc_soft_register(rdev, si_convert_power_state_to_smc()
5268 si_populate_smc_sp(rdev, radeon_state, smc_state); si_convert_power_state_to_smc()
5270 ret = si_populate_power_containment_values(rdev, radeon_state, smc_state); si_convert_power_state_to_smc()
5274 ret = si_populate_sq_ramping_values(rdev, radeon_state, smc_state); si_convert_power_state_to_smc()
5278 return si_populate_smc_t(rdev, radeon_state, smc_state); si_convert_power_state_to_smc()
5281 static int si_upload_sw_state(struct radeon_device *rdev, si_upload_sw_state() argument
5284 struct si_power_info *si_pi = si_get_pi(rdev); si_upload_sw_state()
5296 ret = si_convert_power_state_to_smc(rdev, radeon_new_state, smc_state); si_upload_sw_state()
5300 ret = si_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, si_upload_sw_state()
5306 static int si_upload_ulv_state(struct radeon_device *rdev) si_upload_ulv_state() argument
5308 struct si_power_info *si_pi = si_get_pi(rdev); si_upload_ulv_state()
5320 ret = si_populate_ulv_state(rdev, smc_state); si_upload_ulv_state()
5322 ret = si_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, si_upload_ulv_state()
5329 static int si_upload_smc_data(struct radeon_device *rdev) si_upload_smc_data() argument
5334 if (rdev->pm.dpm.new_active_crtc_count == 0) si_upload_smc_data()
5337 for (i = 0; i < rdev->num_crtc; i++) { si_upload_smc_data()
5338 if (rdev->pm.dpm.new_active_crtcs & (1 << i)) { si_upload_smc_data()
5339 radeon_crtc = rdev->mode_info.crtcs[i]; si_upload_smc_data()
5350 if (si_write_smc_soft_register(rdev, si_upload_smc_data()
5355 if (si_write_smc_soft_register(rdev, si_upload_smc_data()
5360 if (si_write_smc_soft_register(rdev, si_upload_smc_data()
5368 static int si_set_mc_special_registers(struct radeon_device *rdev, si_set_mc_special_registers() argument
5371 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_set_mc_special_registers()
5545 static int si_initialize_mc_reg_table(struct radeon_device *rdev) si_initialize_mc_reg_table() argument
5547 struct si_power_info *si_pi = si_get_pi(rdev); si_initialize_mc_reg_table()
5550 u8 module_index = rv770_get_memory_module_index(rdev); si_initialize_mc_reg_table()
5572 ret = radeon_atom_init_mc_reg_table(rdev, module_index, table); si_initialize_mc_reg_table()
5582 ret = si_set_mc_special_registers(rdev, si_table); si_initialize_mc_reg_table()
5595 static void si_populate_mc_reg_addresses(struct radeon_device *rdev, si_populate_mc_reg_addresses() argument
5598 struct si_power_info *si_pi = si_get_pi(rdev); si_populate_mc_reg_addresses()
5629 static void si_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev, si_convert_mc_reg_table_entry_to_smc() argument
5633 struct si_power_info *si_pi = si_get_pi(rdev); si_convert_mc_reg_table_entry_to_smc()
5649 static void si_convert_mc_reg_table_to_smc(struct radeon_device *rdev, si_convert_mc_reg_table_to_smc() argument
5657 si_convert_mc_reg_table_entry_to_smc(rdev, si_convert_mc_reg_table_to_smc()
5663 static int si_populate_mc_reg_table(struct radeon_device *rdev, si_populate_mc_reg_table() argument
5667 struct si_power_info *si_pi = si_get_pi(rdev); si_populate_mc_reg_table()
5673 si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_seq_index, 1); si_populate_mc_reg_table()
5675 si_populate_mc_reg_addresses(rdev, smc_mc_reg_table); si_populate_mc_reg_table()
5677 si_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0], si_populate_mc_reg_table()
5686 si_convert_mc_reg_table_entry_to_smc(rdev, &ulv->pl, si_populate_mc_reg_table()
5694 si_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, smc_mc_reg_table); si_populate_mc_reg_table()
5696 return si_copy_bytes_to_smc(rdev, si_pi->mc_reg_table_start, si_populate_mc_reg_table()
5701 static int si_upload_mc_reg_table(struct radeon_device *rdev, si_upload_mc_reg_table() argument
5705 struct si_power_info *si_pi = si_get_pi(rdev); si_upload_mc_reg_table()
5713 si_convert_mc_reg_table_to_smc(rdev, radeon_new_state, smc_mc_reg_table); si_upload_mc_reg_table()
5716 return si_copy_bytes_to_smc(rdev, address, si_upload_mc_reg_table()
5723 static void si_enable_voltage_control(struct radeon_device *rdev, bool enable) si_enable_voltage_control() argument
5731 static enum radeon_pcie_gen si_get_maximum_link_speed(struct radeon_device *rdev, si_get_maximum_link_speed() argument
5746 static u16 si_get_current_pcie_speed(struct radeon_device *rdev) si_get_current_pcie_speed() argument
5756 static void si_request_link_speed_change_before_state_change(struct radeon_device *rdev, si_request_link_speed_change_before_state_change() argument
5760 struct si_power_info *si_pi = si_get_pi(rdev); si_request_link_speed_change_before_state_change()
5761 enum radeon_pcie_gen target_link_speed = si_get_maximum_link_speed(rdev, radeon_new_state); si_request_link_speed_change_before_state_change()
5765 current_link_speed = si_get_maximum_link_speed(rdev, radeon_current_state); si_request_link_speed_change_before_state_change()
5775 if (radeon_acpi_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN3, false) == 0) si_request_link_speed_change_before_state_change()
5781 if (radeon_acpi_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, false) == 0) si_request_link_speed_change_before_state_change()
5785 si_pi->force_pcie_gen = si_get_current_pcie_speed(rdev); si_request_link_speed_change_before_state_change()
5794 static void si_notify_link_speed_change_after_state_change(struct radeon_device *rdev, si_notify_link_speed_change_after_state_change() argument
5798 struct si_power_info *si_pi = si_get_pi(rdev); si_notify_link_speed_change_after_state_change()
5799 enum radeon_pcie_gen target_link_speed = si_get_maximum_link_speed(rdev, radeon_new_state); si_notify_link_speed_change_after_state_change()
5811 (si_get_current_pcie_speed(rdev) > 0)) si_notify_link_speed_change_after_state_change()
5815 radeon_acpi_pcie_performance_request(rdev, request, false); si_notify_link_speed_change_after_state_change()
5821 static int si_ds_request(struct radeon_device *rdev,
5824 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
5828 return (si_send_msg_to_smc(rdev, PPSMC_MSG_CancelThrottleOVRDSCLKDS) ==
5832 return (si_send_msg_to_smc(rdev, PPSMC_MSG_ThrottleOVRDSCLKDS) ==
5839 static void si_set_max_cu_value(struct radeon_device *rdev) si_set_max_cu_value() argument
5841 struct si_power_info *si_pi = si_get_pi(rdev); si_set_max_cu_value()
5843 if (rdev->family == CHIP_VERDE) { si_set_max_cu_value()
5844 switch (rdev->pdev->device) { si_set_max_cu_value()
5880 static int si_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev, si_patch_single_dependency_table_based_on_leakage() argument
5889 switch (si_get_leakage_voltage_from_leakage_index(rdev, si_patch_single_dependency_table_based_on_leakage()
5911 static int si_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev) si_patch_dependency_tables_based_on_leakage() argument
5915 ret = si_patch_single_dependency_table_based_on_leakage(rdev, si_patch_dependency_tables_based_on_leakage()
5916 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk); si_patch_dependency_tables_based_on_leakage()
5917 ret = si_patch_single_dependency_table_based_on_leakage(rdev, si_patch_dependency_tables_based_on_leakage()
5918 &rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk); si_patch_dependency_tables_based_on_leakage()
5919 ret = si_patch_single_dependency_table_based_on_leakage(rdev, si_patch_dependency_tables_based_on_leakage()
5920 &rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk); si_patch_dependency_tables_based_on_leakage()
5924 static void si_set_pcie_lane_width_in_smc(struct radeon_device *rdev, si_set_pcie_lane_width_in_smc() argument
5935 radeon_set_pcie_lanes(rdev, new_lane_width); si_set_pcie_lane_width_in_smc()
5936 lane_width = radeon_get_pcie_lanes(rdev); si_set_pcie_lane_width_in_smc()
5937 si_write_smc_soft_register(rdev, SI_SMC_SOFT_REGISTER_non_ulv_pcie_link_width, lane_width); si_set_pcie_lane_width_in_smc()
5941 static void si_set_vce_clock(struct radeon_device *rdev, si_set_vce_clock() argument
5949 vce_v1_0_enable_mgcg(rdev, false); si_set_vce_clock()
5951 vce_v1_0_enable_mgcg(rdev, true); si_set_vce_clock()
5952 radeon_set_vce_clocks(rdev, new_rps->evclk, new_rps->ecclk); si_set_vce_clock()
5956 void si_dpm_setup_asic(struct radeon_device *rdev) si_dpm_setup_asic() argument
5960 r = si_mc_load_microcode(rdev); si_dpm_setup_asic()
5963 rv770_get_memory_type(rdev); si_dpm_setup_asic()
5964 si_read_clock_registers(rdev); si_dpm_setup_asic()
5965 si_enable_acpi_power_management(rdev); si_dpm_setup_asic()
5968 static int si_thermal_enable_alert(struct radeon_device *rdev, si_thermal_enable_alert() argument
5978 rdev->irq.dpm_thermal = false; si_thermal_enable_alert()
5979 result = si_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt); si_thermal_enable_alert()
5987 rdev->irq.dpm_thermal = true; si_thermal_enable_alert()
5993 static int si_thermal_set_temperature_range(struct radeon_device *rdev, si_thermal_set_temperature_range() argument
6012 rdev->pm.dpm.thermal.min_temp = low_temp; si_thermal_set_temperature_range()
6013 rdev->pm.dpm.thermal.max_temp = high_temp; si_thermal_set_temperature_range()
6018 static void si_fan_ctrl_set_static_mode(struct radeon_device *rdev, u32 mode) si_fan_ctrl_set_static_mode() argument
6020 struct si_power_info *si_pi = si_get_pi(rdev); si_fan_ctrl_set_static_mode()
6040 static int si_thermal_setup_fan_table(struct radeon_device *rdev) si_thermal_setup_fan_table() argument
6042 struct si_power_info *si_pi = si_get_pi(rdev); si_thermal_setup_fan_table()
6052 rdev->pm.dpm.fan.ucode_fan_control = false; si_thermal_setup_fan_table()
6059 rdev->pm.dpm.fan.ucode_fan_control = false; si_thermal_setup_fan_table()
6063 tmp64 = (u64)rdev->pm.dpm.fan.pwm_min * duty100; si_thermal_setup_fan_table()
6067 t_diff1 = rdev->pm.dpm.fan.t_med - rdev->pm.dpm.fan.t_min; si_thermal_setup_fan_table()
6068 t_diff2 = rdev->pm.dpm.fan.t_high - rdev->pm.dpm.fan.t_med; si_thermal_setup_fan_table()
6070 pwm_diff1 = rdev->pm.dpm.fan.pwm_med - rdev->pm.dpm.fan.pwm_min; si_thermal_setup_fan_table()
6071 pwm_diff2 = rdev->pm.dpm.fan.pwm_high - rdev->pm.dpm.fan.pwm_med; si_thermal_setup_fan_table()
6076 fan_table.temp_min = cpu_to_be16((50 + rdev->pm.dpm.fan.t_min) / 100); si_thermal_setup_fan_table()
6077 fan_table.temp_med = cpu_to_be16((50 + rdev->pm.dpm.fan.t_med) / 100); si_thermal_setup_fan_table()
6078 fan_table.temp_max = cpu_to_be16((50 + rdev->pm.dpm.fan.t_max) / 100); si_thermal_setup_fan_table()
6085 fan_table.hys_down = cpu_to_be16(rdev->pm.dpm.fan.t_hyst); si_thermal_setup_fan_table()
6093 reference_clock = radeon_get_xclk(rdev); si_thermal_setup_fan_table()
6095 fan_table.refresh_period = cpu_to_be32((rdev->pm.dpm.fan.cycle_delay * si_thermal_setup_fan_table()
6103 ret = si_copy_bytes_to_smc(rdev, si_thermal_setup_fan_table()
6111 rdev->pm.dpm.fan.ucode_fan_control = false; si_thermal_setup_fan_table()
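
The si_thermal_setup_fan_table() hits above compute a two-segment fan curve: per-segment temperature and PWM deltas (t_diff1/t_diff2, pwm_diff1/pwm_diff2), a scaled minimum duty value, and temperatures rounded from hundredths of a degree to whole degrees via (50 + t) / 100. A minimal user-space sketch of that arithmetic; the field values and the percent/centidegree units are assumptions for illustration, not the driver's real defaults:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        /* Stand-ins for the rdev->pm.dpm.fan fields; units assumed
         * to be 0.01 degC for temperatures, percent for PWM points. */
        int t_min = 4500, t_med = 6500, t_high = 8500;
        int pwm_min = 20, pwm_med = 50, pwm_high = 90;
        uint32_t duty100 = 255;  /* hypothetical 100%-duty register value */

        /* "tmp64 = (u64)rdev->pm.dpm.fan.pwm_min * duty100": scale the
         * minimum PWM point into duty-cycle units before use. */
        uint64_t fdo_min = (uint64_t)pwm_min * duty100 / 100;

        /* Per-segment deltas, as in t_diff1/t_diff2, pwm_diff1/pwm_diff2. */
        int t_diff1 = t_med - t_min, t_diff2 = t_high - t_med;
        int pwm_diff1 = pwm_med - pwm_min, pwm_diff2 = pwm_high - pwm_med;

        /* Round 0.01 degC to whole degrees: (50 + t) / 100. */
        printf("fdo_min=%llu t_diff=%d/%d pwm_diff=%d/%d temp_min=%d degC\n",
               (unsigned long long)fdo_min, t_diff1, t_diff2,
               pwm_diff1, pwm_diff2, (50 + t_min) / 100);
        return 0;
    }
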
6117 static int si_fan_ctrl_start_smc_fan_control(struct radeon_device *rdev) si_fan_ctrl_start_smc_fan_control() argument
6119 struct si_power_info *si_pi = si_get_pi(rdev); si_fan_ctrl_start_smc_fan_control()
6122 ret = si_send_msg_to_smc(rdev, PPSMC_StartFanControl); si_fan_ctrl_start_smc_fan_control()
6131 static int si_fan_ctrl_stop_smc_fan_control(struct radeon_device *rdev) si_fan_ctrl_stop_smc_fan_control() argument
6133 struct si_power_info *si_pi = si_get_pi(rdev); si_fan_ctrl_stop_smc_fan_control()
6136 ret = si_send_msg_to_smc(rdev, PPSMC_StopFanControl); si_fan_ctrl_stop_smc_fan_control()
6146 int si_fan_ctrl_get_fan_speed_percent(struct radeon_device *rdev, si_fan_ctrl_get_fan_speed_percent() argument
6152 if (rdev->pm.no_fan) si_fan_ctrl_get_fan_speed_percent()
6171 int si_fan_ctrl_set_fan_speed_percent(struct radeon_device *rdev, si_fan_ctrl_set_fan_speed_percent() argument
6174 struct si_power_info *si_pi = si_get_pi(rdev); si_fan_ctrl_set_fan_speed_percent()
6179 if (rdev->pm.no_fan) si_fan_ctrl_set_fan_speed_percent()
6204 void si_fan_ctrl_set_mode(struct radeon_device *rdev, u32 mode) si_fan_ctrl_set_mode() argument
6208 if (rdev->pm.dpm.fan.ucode_fan_control) si_fan_ctrl_set_mode()
6209 si_fan_ctrl_stop_smc_fan_control(rdev); si_fan_ctrl_set_mode()
6210 si_fan_ctrl_set_static_mode(rdev, mode); si_fan_ctrl_set_mode()
6213 if (rdev->pm.dpm.fan.ucode_fan_control) si_fan_ctrl_set_mode()
6214 si_thermal_start_smc_fan_control(rdev); si_fan_ctrl_set_mode()
6216 si_fan_ctrl_set_default_mode(rdev); si_fan_ctrl_set_mode()
6220 u32 si_fan_ctrl_get_mode(struct radeon_device *rdev) si_fan_ctrl_get_mode() argument
6222 struct si_power_info *si_pi = si_get_pi(rdev); si_fan_ctrl_get_mode()
6233 static int si_fan_ctrl_get_fan_speed_rpm(struct radeon_device *rdev,
6237 u32 xclk = radeon_get_xclk(rdev);
6239 if (rdev->pm.no_fan)
6242 if (rdev->pm.fan_pulses_per_revolution == 0)
6254 static int si_fan_ctrl_set_fan_speed_rpm(struct radeon_device *rdev,
6258 u32 xclk = radeon_get_xclk(rdev);
6260 if (rdev->pm.no_fan)
6263 if (rdev->pm.fan_pulses_per_revolution == 0)
6266 if ((speed < rdev->pm.fan_min_rpm) ||
6267 (speed > rdev->pm.fan_max_rpm))
6270 if (rdev->pm.dpm.fan.ucode_fan_control)
6271 si_fan_ctrl_stop_smc_fan_control(rdev);
6278 si_fan_ctrl_set_static_mode(rdev, FDO_PWM_MODE_STATIC_RPM);
6284 static void si_fan_ctrl_set_default_mode(struct radeon_device *rdev) si_fan_ctrl_set_default_mode() argument
6286 struct si_power_info *si_pi = si_get_pi(rdev); si_fan_ctrl_set_default_mode()
6301 static void si_thermal_start_smc_fan_control(struct radeon_device *rdev) si_thermal_start_smc_fan_control() argument
6303 if (rdev->pm.dpm.fan.ucode_fan_control) { si_thermal_start_smc_fan_control()
6304 si_fan_ctrl_start_smc_fan_control(rdev); si_thermal_start_smc_fan_control()
6305 si_fan_ctrl_set_static_mode(rdev, FDO_PWM_MODE_STATIC); si_thermal_start_smc_fan_control()
6309 static void si_thermal_initialize(struct radeon_device *rdev) si_thermal_initialize() argument
6313 if (rdev->pm.fan_pulses_per_revolution) { si_thermal_initialize()
6315 tmp |= EDGE_PER_REV(rdev->pm.fan_pulses_per_revolution -1); si_thermal_initialize()
6324 static int si_thermal_start_thermal_controller(struct radeon_device *rdev) si_thermal_start_thermal_controller() argument
6328 si_thermal_initialize(rdev); si_thermal_start_thermal_controller()
6329 ret = si_thermal_set_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX); si_thermal_start_thermal_controller()
6332 ret = si_thermal_enable_alert(rdev, true); si_thermal_start_thermal_controller()
6335 if (rdev->pm.dpm.fan.ucode_fan_control) { si_thermal_start_thermal_controller()
6336 ret = si_halt_smc(rdev); si_thermal_start_thermal_controller()
6339 ret = si_thermal_setup_fan_table(rdev); si_thermal_start_thermal_controller()
6342 ret = si_resume_smc(rdev); si_thermal_start_thermal_controller()
6345 si_thermal_start_smc_fan_control(rdev); si_thermal_start_thermal_controller()
6351 static void si_thermal_stop_thermal_controller(struct radeon_device *rdev) si_thermal_stop_thermal_controller() argument
6353 if (!rdev->pm.no_fan) { si_thermal_stop_thermal_controller()
6354 si_fan_ctrl_set_default_mode(rdev); si_thermal_stop_thermal_controller()
6355 si_fan_ctrl_stop_smc_fan_control(rdev); si_thermal_stop_thermal_controller()
6359 int si_dpm_enable(struct radeon_device *rdev) si_dpm_enable() argument
6361 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_dpm_enable()
6362 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_dpm_enable()
6363 struct si_power_info *si_pi = si_get_pi(rdev); si_dpm_enable()
6364 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps; si_dpm_enable()
6367 if (si_is_smc_running(rdev)) si_dpm_enable()
6370 si_enable_voltage_control(rdev, true); si_dpm_enable()
6372 si_get_mvdd_configuration(rdev); si_dpm_enable()
6374 ret = si_construct_voltage_tables(rdev); si_dpm_enable()
6381 ret = si_initialize_mc_reg_table(rdev); si_dpm_enable()
6386 si_enable_spread_spectrum(rdev, true); si_dpm_enable()
6388 si_enable_thermal_protection(rdev, true); si_dpm_enable()
6389 si_setup_bsp(rdev); si_dpm_enable()
6390 si_program_git(rdev); si_dpm_enable()
6391 si_program_tp(rdev); si_dpm_enable()
6392 si_program_tpp(rdev); si_dpm_enable()
6393 si_program_sstp(rdev); si_dpm_enable()
6394 si_enable_display_gap(rdev); si_dpm_enable()
6395 si_program_vc(rdev); si_dpm_enable()
6396 ret = si_upload_firmware(rdev); si_dpm_enable()
6401 ret = si_process_firmware_header(rdev); si_dpm_enable()
6406 ret = si_initial_switch_from_arb_f0_to_f1(rdev); si_dpm_enable()
6411 ret = si_init_smc_table(rdev); si_dpm_enable()
6416 ret = si_init_smc_spll_table(rdev); si_dpm_enable()
6421 ret = si_init_arb_table_index(rdev); si_dpm_enable()
6427 ret = si_populate_mc_reg_table(rdev, boot_ps); si_dpm_enable()
6433 ret = si_initialize_smc_cac_tables(rdev); si_dpm_enable()
6438 ret = si_initialize_hardware_cac_manager(rdev); si_dpm_enable()
6443 ret = si_initialize_smc_dte_tables(rdev); si_dpm_enable()
6448 ret = si_populate_smc_tdp_limits(rdev, boot_ps); si_dpm_enable()
6453 ret = si_populate_smc_tdp_limits_2(rdev, boot_ps); si_dpm_enable()
6458 si_program_response_times(rdev); si_dpm_enable()
6459 si_program_ds_registers(rdev); si_dpm_enable()
6460 si_dpm_start_smc(rdev); si_dpm_enable()
6461 ret = si_notify_smc_display_change(rdev, false); si_dpm_enable()
6466 si_enable_sclk_control(rdev, true); si_dpm_enable()
6467 si_start_dpm(rdev); si_dpm_enable()
6469 si_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true); si_dpm_enable()
6471 si_thermal_start_thermal_controller(rdev); si_dpm_enable()
6473 ni_update_current_ps(rdev, boot_ps); si_dpm_enable()
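
si_dpm_enable() is a long bring-up chain in which every stage returns an int and the first failure aborts the enable; the error checks between the ret = ... lines are elided by the search but presumably follow that shape. A compact sketch of the pattern, with placeholder stages standing in for the si_* steps:

    #include <stdio.h>

    /* Placeholder stages standing in for si_construct_voltage_tables(),
     * si_upload_firmware(), si_init_smc_table(), and friends. */
    static int stage_a(void) { return 0; }
    static int stage_b(void) { return 0; }
    static int stage_c(void) { return -22; }   /* simulated -EINVAL */

    static int dpm_enable(void)
    {
        int ret;

        /* The first failing stage aborts the whole enable. */
        if ((ret = stage_a())) { fprintf(stderr, "stage_a: %d\n", ret); return ret; }
        if ((ret = stage_b())) { fprintf(stderr, "stage_b: %d\n", ret); return ret; }
        if ((ret = stage_c())) { fprintf(stderr, "stage_c: %d\n", ret); return ret; }
        return 0;
    }

    int main(void) { return dpm_enable() ? 1 : 0; }
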
6478 static int si_set_temperature_range(struct radeon_device *rdev) si_set_temperature_range() argument
6482 ret = si_thermal_enable_alert(rdev, false); si_set_temperature_range()
6485 ret = si_thermal_set_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX); si_set_temperature_range()
6488 ret = si_thermal_enable_alert(rdev, true); si_set_temperature_range()
6495 int si_dpm_late_enable(struct radeon_device *rdev) si_dpm_late_enable() argument
6499 ret = si_set_temperature_range(rdev); si_dpm_late_enable()
6506 void si_dpm_disable(struct radeon_device *rdev) si_dpm_disable() argument
6508 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_dpm_disable()
6509 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps; si_dpm_disable()
6511 if (!si_is_smc_running(rdev)) si_dpm_disable()
6513 si_thermal_stop_thermal_controller(rdev); si_dpm_disable()
6514 si_disable_ulv(rdev); si_dpm_disable()
6515 si_clear_vc(rdev); si_dpm_disable()
6517 si_enable_thermal_protection(rdev, false); si_dpm_disable()
6518 si_enable_power_containment(rdev, boot_ps, false); si_dpm_disable()
6519 si_enable_smc_cac(rdev, boot_ps, false); si_dpm_disable()
6520 si_enable_spread_spectrum(rdev, false); si_dpm_disable()
6521 si_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false); si_dpm_disable()
6522 si_stop_dpm(rdev); si_dpm_disable()
6523 si_reset_to_default(rdev); si_dpm_disable()
6524 si_dpm_stop_smc(rdev); si_dpm_disable()
6525 si_force_switch_to_arb_f0(rdev); si_dpm_disable()
6527 ni_update_current_ps(rdev, boot_ps); si_dpm_disable()
6530 int si_dpm_pre_set_power_state(struct radeon_device *rdev) si_dpm_pre_set_power_state() argument
6532 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_dpm_pre_set_power_state()
6533 struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps; si_dpm_pre_set_power_state()
6536 ni_update_requested_ps(rdev, new_ps); si_dpm_pre_set_power_state()
6538 si_apply_state_adjust_rules(rdev, &eg_pi->requested_rps); si_dpm_pre_set_power_state()
6543 static int si_power_control_set_level(struct radeon_device *rdev) si_power_control_set_level() argument
6545 struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps; si_power_control_set_level()
6548 ret = si_restrict_performance_levels_before_switch(rdev); si_power_control_set_level()
6551 ret = si_halt_smc(rdev); si_power_control_set_level()
6554 ret = si_populate_smc_tdp_limits(rdev, new_ps); si_power_control_set_level()
6557 ret = si_populate_smc_tdp_limits_2(rdev, new_ps); si_power_control_set_level()
6560 ret = si_resume_smc(rdev); si_power_control_set_level()
6563 ret = si_set_sw_state(rdev); si_power_control_set_level()
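
si_power_control_set_level() brackets the TDP-limit rewrite between si_halt_smc() and si_resume_smc(), so the firmware never executes against half-written tables. A stand-alone sketch of that quiesce/program/resume bracket; whether the real driver still resumes when programming fails is not visible in these hits:

    #include <stdio.h>

    static int halt_smc(void)       { puts("smc halted");    return 0; }
    static int resume_smc(void)     { puts("smc resumed");   return 0; }
    static int program_limits(void) { puts("limits written"); return 0; }

    /* Quiesce the firmware, rewrite its tables, let it run again:
     * the si_halt_smc() / si_populate_smc_tdp_limits() / si_resume_smc()
     * bracket from the hits above, with simplified error handling. */
    static int set_level(void)
    {
        int ret = halt_smc();
        if (ret)
            return ret;
        ret = program_limits();
        if (ret)
            return ret;  /* resume-on-error behavior isn't shown in the hits */
        return resume_smc();
    }

    int main(void) { return set_level(); }
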
6569 int si_dpm_set_power_state(struct radeon_device *rdev) si_dpm_set_power_state() argument
6571 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_dpm_set_power_state()
6576 ret = si_disable_ulv(rdev); si_dpm_set_power_state()
6581 ret = si_restrict_performance_levels_before_switch(rdev); si_dpm_set_power_state()
6587 si_request_link_speed_change_before_state_change(rdev, new_ps, old_ps); si_dpm_set_power_state()
6588 ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps); si_dpm_set_power_state()
6589 ret = si_enable_power_containment(rdev, new_ps, false); si_dpm_set_power_state()
6594 ret = si_enable_smc_cac(rdev, new_ps, false); si_dpm_set_power_state()
6599 ret = si_halt_smc(rdev); si_dpm_set_power_state()
6604 ret = si_upload_sw_state(rdev, new_ps); si_dpm_set_power_state()
6609 ret = si_upload_smc_data(rdev); si_dpm_set_power_state()
6614 ret = si_upload_ulv_state(rdev); si_dpm_set_power_state()
6620 ret = si_upload_mc_reg_table(rdev, new_ps); si_dpm_set_power_state()
6626 ret = si_program_memory_timing_parameters(rdev, new_ps); si_dpm_set_power_state()
6631 si_set_pcie_lane_width_in_smc(rdev, new_ps, old_ps); si_dpm_set_power_state()
6633 ret = si_resume_smc(rdev); si_dpm_set_power_state()
6638 ret = si_set_sw_state(rdev); si_dpm_set_power_state()
6643 ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps); si_dpm_set_power_state()
6644 si_set_vce_clock(rdev, new_ps, old_ps); si_dpm_set_power_state()
6646 si_notify_link_speed_change_after_state_change(rdev, new_ps, old_ps); si_dpm_set_power_state()
6647 ret = si_set_power_state_conditionally_enable_ulv(rdev, new_ps); si_dpm_set_power_state()
6652 ret = si_enable_smc_cac(rdev, new_ps, true); si_dpm_set_power_state()
6657 ret = si_enable_power_containment(rdev, new_ps, true); si_dpm_set_power_state()
6663 ret = si_power_control_set_level(rdev); si_dpm_set_power_state()
6672 void si_dpm_post_set_power_state(struct radeon_device *rdev) si_dpm_post_set_power_state() argument
6674 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_dpm_post_set_power_state()
6677 ni_update_current_ps(rdev, new_ps); si_dpm_post_set_power_state()
6681 void si_dpm_reset_asic(struct radeon_device *rdev)
6683 si_restrict_performance_levels_before_switch(rdev);
6684 si_disable_ulv(rdev);
6685 si_set_boot_state(rdev);
6689 void si_dpm_display_configuration_changed(struct radeon_device *rdev) si_dpm_display_configuration_changed() argument
6691 si_program_display_gap(rdev); si_dpm_display_configuration_changed()
6716 static void si_parse_pplib_non_clock_info(struct radeon_device *rdev, si_parse_pplib_non_clock_info() argument
6737 rdev->pm.dpm.boot_ps = rps; si_parse_pplib_non_clock_info()
6739 rdev->pm.dpm.uvd_ps = rps; si_parse_pplib_non_clock_info()
6742 static void si_parse_pplib_clock_info(struct radeon_device *rdev, si_parse_pplib_clock_info() argument
6746 struct rv7xx_power_info *pi = rv770_get_pi(rdev); si_parse_pplib_clock_info()
6747 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_parse_pplib_clock_info()
6748 struct si_power_info *si_pi = si_get_pi(rdev); si_parse_pplib_clock_info()
6764 pl->pcie_gen = r600_get_pcie_gen_support(rdev, si_parse_pplib_clock_info()
6770 ret = si_get_leakage_voltage_from_leakage_index(rdev, pl->vddc, si_parse_pplib_clock_info()
6801 radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd); si_parse_pplib_clock_info()
6802 pl->mclk = rdev->clock.default_mclk; si_parse_pplib_clock_info()
6803 pl->sclk = rdev->clock.default_sclk; si_parse_pplib_clock_info()
6811 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk; si_parse_pplib_clock_info()
6812 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk; si_parse_pplib_clock_info()
6813 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc; si_parse_pplib_clock_info()
6814 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci; si_parse_pplib_clock_info()
6818 static int si_parse_power_table(struct radeon_device *rdev) si_parse_power_table() argument
6820 struct radeon_mode_info *mode_info = &rdev->mode_info; si_parse_power_table()
6850 rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) * si_parse_power_table()
6852 if (!rdev->pm.dpm.ps) si_parse_power_table()
6861 if (!rdev->pm.power_state[i].clock_info) si_parse_power_table()
6865 kfree(rdev->pm.dpm.ps); si_parse_power_table()
6868 rdev->pm.dpm.ps[i].ps_priv = ps; si_parse_power_table()
6869 si_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], si_parse_power_table()
6883 si_parse_pplib_clock_info(rdev, si_parse_power_table()
6884 &rdev->pm.dpm.ps[i], k, si_parse_power_table()
6890 rdev->pm.dpm.num_ps = state_array->ucNumEntries; si_parse_power_table()
6895 clock_array_index = rdev->pm.dpm.vce_states[i].clk_idx; si_parse_power_table()
6902 rdev->pm.dpm.vce_states[i].sclk = sclk; si_parse_power_table()
6903 rdev->pm.dpm.vce_states[i].mclk = mclk; si_parse_power_table()
6909 int si_dpm_init(struct radeon_device *rdev) si_dpm_init() argument
6922 rdev->pm.dpm.priv = si_pi; si_dpm_init()
6927 ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask); si_dpm_init()
6933 si_pi->boot_pcie_gen = si_get_current_pcie_speed(rdev); si_dpm_init()
6935 si_set_max_cu_value(rdev); si_dpm_init()
6937 rv770_get_max_vddc(rdev); si_dpm_init()
6938 si_get_leakage_vddc(rdev); si_dpm_init()
6939 si_patch_dependency_tables_based_on_leakage(rdev); si_dpm_init()
6946 ret = r600_get_platform_caps(rdev); si_dpm_init()
6950 ret = r600_parse_extended_power_table(rdev); si_dpm_init()
6954 ret = si_parse_power_table(rdev); si_dpm_init()
6958 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries = si_dpm_init()
6960 if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) { si_dpm_init()
6961 r600_free_extended_power_table(rdev); si_dpm_init()
6964 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4; si_dpm_init()
6965 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0; si_dpm_init()
6966 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0; si_dpm_init()
6967 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000; si_dpm_init()
6968 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720; si_dpm_init()
6969 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000; si_dpm_init()
6970 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810; si_dpm_init()
6971 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000; si_dpm_init()
6972 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900; si_dpm_init()
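
The four entries just initialized form a fallback dispclk-to-VDDC dependency table. Reading them in radeon's usual 10 kHz clock and millivolt units (an assumption here), 36000/720 means 360 MHz needs 720 mV. A sketch of one common way such a table is consumed, returning the first entry whose clock covers the request; the driver's own lookup may differ:

    #include <stdio.h>

    struct cv_entry { unsigned clk; unsigned short v; };

    /* The fallback table from the hits above, assumed 10 kHz / mV units. */
    static const struct cv_entry tbl[] = {
        { 0, 0 }, { 36000, 720 }, { 54000, 810 }, { 72000, 900 },
    };

    /* Return the voltage of the first entry whose clock covers the
     * request, clamping to the top entry otherwise. */
    static unsigned short voltage_for(unsigned dispclk)
    {
        for (unsigned i = 0; i < sizeof(tbl) / sizeof(tbl[0]); i++)
            if (tbl[i].clk >= dispclk)
                return tbl[i].v;
        return tbl[sizeof(tbl) / sizeof(tbl[0]) - 1].v;
    }

    int main(void)
    {
        printf("dispclk 40000 -> %u mV\n", voltage_for(40000)); /* 810 */
        return 0;
    }
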
6974 if (rdev->pm.dpm.voltage_response_time == 0) si_dpm_init()
6975 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT; si_dpm_init()
6976 if (rdev->pm.dpm.backbias_response_time == 0) si_dpm_init()
6977 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT; si_dpm_init()
6979 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, si_dpm_init()
6989 if (si_is_special_1gb_platform(rdev)) si_dpm_init()
6999 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, si_dpm_init()
7003 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, si_dpm_init()
7006 radeon_atom_get_svi2_info(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, si_dpm_init()
7011 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, si_dpm_init()
7015 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, si_dpm_init()
7019 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, si_dpm_init()
7023 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, si_dpm_init()
7026 rv770_get_engine_memory_ss(rdev); si_dpm_init()
7037 if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE) si_dpm_init()
7047 radeon_acpi_is_pcie_performance_request_supported(rdev); si_dpm_init()
7054 rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 4; si_dpm_init()
7055 rdev->pm.dpm.dyn_state.sclk_mclk_delta = 15000; si_dpm_init()
7056 rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200; si_dpm_init()
7057 rdev->pm.dpm.dyn_state.valid_sclk_values.count = 0; si_dpm_init()
7058 rdev->pm.dpm.dyn_state.valid_sclk_values.values = NULL; si_dpm_init()
7059 rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0; si_dpm_init()
7060 rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL; si_dpm_init()
7062 si_initialize_powertune_defaults(rdev); si_dpm_init()
7065 if ((rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk == 0) || si_dpm_init()
7066 (rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk == 0)) si_dpm_init()
7067 rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc = si_dpm_init()
7068 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac; si_dpm_init()
7075 void si_dpm_fini(struct radeon_device *rdev) si_dpm_fini() argument
7079 for (i = 0; i < rdev->pm.dpm.num_ps; i++) { si_dpm_fini()
7080 kfree(rdev->pm.dpm.ps[i].ps_priv); si_dpm_fini()
7082 kfree(rdev->pm.dpm.ps); si_dpm_fini()
7083 kfree(rdev->pm.dpm.priv); si_dpm_fini()
7084 kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries); si_dpm_fini()
7085 r600_free_extended_power_table(rdev); si_dpm_fini()
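
si_dpm_fini() shows the teardown matching si_dpm_init(): each power state's private blob is freed before the array that owns it, then the side tables. The ownership pattern in miniature:

    #include <stdlib.h>

    struct ps { void *ps_priv; };

    /* Free each state's private blob, then the array that owns it --
     * the kfree() ordering in the si_dpm_fini() hits above. */
    static void dpm_fini(struct ps *ps, int num_ps)
    {
        for (int i = 0; i < num_ps; i++)
            free(ps[i].ps_priv);
        free(ps);
    }

    int main(void)
    {
        struct ps *ps = calloc(4, sizeof(*ps));
        if (!ps)
            return 1;
        for (int i = 0; i < 4; i++)
            ps[i].ps_priv = malloc(32);
        dpm_fini(ps, 4);
        return 0;
    }
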
7088 void si_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev, si_dpm_debugfs_print_current_performance_level() argument
7091 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_dpm_debugfs_print_current_performance_level()
7109 u32 si_dpm_get_current_sclk(struct radeon_device *rdev) si_dpm_get_current_sclk() argument
7111 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_dpm_get_current_sclk()
7127 u32 si_dpm_get_current_mclk(struct radeon_device *rdev) si_dpm_get_current_mclk() argument
7129 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); si_dpm_get_current_mclk()
H A D radeon_audio.c 32 void r600_audio_enable(struct radeon_device *rdev, struct r600_audio_pin *pin,
34 void dce4_audio_enable(struct radeon_device *rdev, struct r600_audio_pin *pin,
36 void dce6_audio_enable(struct radeon_device *rdev, struct r600_audio_pin *pin,
38 u32 dce6_endpoint_rreg(struct radeon_device *rdev, u32 offset, u32 reg);
39 void dce6_endpoint_wreg(struct radeon_device *rdev,
63 struct r600_audio_pin* r600_audio_get_pin(struct radeon_device *rdev);
64 struct r600_audio_pin* dce6_audio_get_pin(struct radeon_device *rdev);
66 void r600_hdmi_audio_set_dto(struct radeon_device *rdev,
68 void dce3_2_audio_set_dto(struct radeon_device *rdev,
70 void dce4_hdmi_audio_set_dto(struct radeon_device *rdev,
72 void dce4_dp_audio_set_dto(struct radeon_device *rdev,
74 void dce6_hdmi_audio_set_dto(struct radeon_device *rdev,
76 void dce6_dp_audio_set_dto(struct radeon_device *rdev,
78 void r600_set_avi_packet(struct radeon_device *rdev, u32 offset,
80 void evergreen_set_avi_packet(struct radeon_device *rdev, u32 offset,
117 static u32 radeon_audio_rreg(struct radeon_device *rdev, u32 offset, u32 reg) radeon_audio_rreg() argument
122 static void radeon_audio_wreg(struct radeon_device *rdev, u32 offset, radeon_audio_wreg() argument
245 static void radeon_audio_enable(struct radeon_device *rdev, radeon_audio_enable() argument
256 if (rdev->mode_info.mode_config_initialized) { radeon_audio_enable()
257 list_for_each_entry(encoder, &rdev->ddev->mode_config.encoder_list, head) { radeon_audio_enable()
270 if (rdev->audio.funcs->enable) radeon_audio_enable()
271 rdev->audio.funcs->enable(rdev, pin, enable_mask); radeon_audio_enable()
274 static void radeon_audio_interface_init(struct radeon_device *rdev) radeon_audio_interface_init() argument
276 if (ASIC_IS_DCE6(rdev)) { radeon_audio_interface_init()
277 rdev->audio.funcs = &dce6_funcs; radeon_audio_interface_init()
278 rdev->audio.hdmi_funcs = &dce6_hdmi_funcs; radeon_audio_interface_init()
279 rdev->audio.dp_funcs = &dce6_dp_funcs; radeon_audio_interface_init()
280 } else if (ASIC_IS_DCE4(rdev)) { radeon_audio_interface_init()
281 rdev->audio.funcs = &dce4_funcs; radeon_audio_interface_init()
282 rdev->audio.hdmi_funcs = &dce4_hdmi_funcs; radeon_audio_interface_init()
283 rdev->audio.dp_funcs = &dce4_dp_funcs; radeon_audio_interface_init()
284 } else if (ASIC_IS_DCE32(rdev)) { radeon_audio_interface_init()
285 rdev->audio.funcs = &dce32_funcs; radeon_audio_interface_init()
286 rdev->audio.hdmi_funcs = &dce32_hdmi_funcs; radeon_audio_interface_init()
287 rdev->audio.dp_funcs = &dce32_dp_funcs; radeon_audio_interface_init()
289 rdev->audio.funcs = &r600_funcs; radeon_audio_interface_init()
290 rdev->audio.hdmi_funcs = &r600_hdmi_funcs; radeon_audio_interface_init()
291 rdev->audio.dp_funcs = 0; radeon_audio_interface_init()
295 static int radeon_audio_chipset_supported(struct radeon_device *rdev) radeon_audio_chipset_supported() argument
297 return ASIC_IS_DCE2(rdev) && !ASIC_IS_NODCE(rdev); radeon_audio_chipset_supported()
300 int radeon_audio_init(struct radeon_device *rdev) radeon_audio_init() argument
304 if (!radeon_audio || !radeon_audio_chipset_supported(rdev)) radeon_audio_init()
307 rdev->audio.enabled = true; radeon_audio_init()
309 if (ASIC_IS_DCE83(rdev)) /* KB: 2 streams, 3 endpoints */ radeon_audio_init()
310 rdev->audio.num_pins = 3; radeon_audio_init()
311 else if (ASIC_IS_DCE81(rdev)) /* KV: 4 streams, 7 endpoints */ radeon_audio_init()
312 rdev->audio.num_pins = 7; radeon_audio_init()
313 else if (ASIC_IS_DCE8(rdev)) /* BN/HW: 6 streams, 7 endpoints */ radeon_audio_init()
314 rdev->audio.num_pins = 7; radeon_audio_init()
315 else if (ASIC_IS_DCE64(rdev)) /* OL: 2 streams, 2 endpoints */ radeon_audio_init()
316 rdev->audio.num_pins = 2; radeon_audio_init()
317 else if (ASIC_IS_DCE61(rdev)) /* TN: 4 streams, 6 endpoints */ radeon_audio_init()
318 rdev->audio.num_pins = 6; radeon_audio_init()
319 else if (ASIC_IS_DCE6(rdev)) /* SI: 6 streams, 6 endpoints */ radeon_audio_init()
320 rdev->audio.num_pins = 6; radeon_audio_init()
322 rdev->audio.num_pins = 1; radeon_audio_init()
324 for (i = 0; i < rdev->audio.num_pins; i++) { radeon_audio_init()
325 rdev->audio.pin[i].channels = -1; radeon_audio_init()
326 rdev->audio.pin[i].rate = -1; radeon_audio_init()
327 rdev->audio.pin[i].bits_per_sample = -1; radeon_audio_init()
328 rdev->audio.pin[i].status_bits = 0; radeon_audio_init()
329 rdev->audio.pin[i].category_code = 0; radeon_audio_init()
330 rdev->audio.pin[i].connected = false; radeon_audio_init()
331 rdev->audio.pin[i].offset = pin_offsets[i]; radeon_audio_init()
332 rdev->audio.pin[i].id = i; radeon_audio_init()
335 radeon_audio_interface_init(rdev); radeon_audio_init()
338 for (i = 0; i < rdev->audio.num_pins; i++) radeon_audio_init()
339 radeon_audio_enable(rdev, &rdev->audio.pin[i], 0); radeon_audio_init()
344 u32 radeon_audio_endpoint_rreg(struct radeon_device *rdev, u32 offset, u32 reg) radeon_audio_endpoint_rreg() argument
346 if (rdev->audio.funcs->endpoint_rreg) radeon_audio_endpoint_rreg()
347 return rdev->audio.funcs->endpoint_rreg(rdev, offset, reg); radeon_audio_endpoint_rreg()
352 void radeon_audio_endpoint_wreg(struct radeon_device *rdev, u32 offset, radeon_audio_endpoint_wreg() argument
355 if (rdev->audio.funcs->endpoint_wreg) radeon_audio_endpoint_wreg()
356 rdev->audio.funcs->endpoint_wreg(rdev, offset, reg, v); radeon_audio_endpoint_wreg()
421 struct radeon_device *rdev = encoder->dev->dev_private; radeon_audio_get_pin() local
425 return radeon_encoder->audio->get_pin(rdev); radeon_audio_get_pin()
443 struct radeon_device *rdev = dev->dev_private; radeon_audio_detect() local
447 if (!radeon_audio_chipset_supported(rdev)) radeon_audio_detect()
461 radeon_encoder->audio = rdev->audio.dp_funcs; radeon_audio_detect()
463 radeon_encoder->audio = rdev->audio.hdmi_funcs; radeon_audio_detect()
465 radeon_encoder->audio = rdev->audio.hdmi_funcs; radeon_audio_detect()
471 radeon_audio_enable(rdev, dig->pin, 0xf); radeon_audio_detect()
473 radeon_audio_enable(rdev, dig->pin, 0); radeon_audio_detect()
477 radeon_audio_enable(rdev, dig->pin, 0); radeon_audio_detect()
482 void radeon_audio_fini(struct radeon_device *rdev) radeon_audio_fini() argument
486 if (!rdev->audio.enabled) radeon_audio_fini()
489 for (i = 0; i < rdev->audio.num_pins; i++) radeon_audio_fini()
490 radeon_audio_enable(rdev, &rdev->audio.pin[i], 0); radeon_audio_fini()
492 rdev->audio.enabled = false; radeon_audio_fini()
497 struct radeon_device *rdev = encoder->dev->dev_private; radeon_audio_set_dto() local
502 radeon_encoder->audio->set_dto(rdev, crtc, clock); radeon_audio_set_dto()
508 struct radeon_device *rdev = encoder->dev->dev_private; radeon_audio_set_avi_packet() local
544 radeon_encoder->audio->set_avi_packet(rdev, dig->afmt->offset, radeon_audio_set_avi_packet()
738 struct radeon_device *rdev = dev->dev_private; radeon_audio_dp_mode_set() local
753 radeon_audio_set_dto(encoder, rdev->clock.vco_freq * 10); radeon_audio_dp_mode_set()
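
radeon_audio_interface_init() selects one of several function-pointer tables (dce6_funcs, dce4_funcs, dce32_funcs, r600_funcs) by ASIC generation, and wrappers such as radeon_audio_endpoint_rreg() NULL-check each hook before calling through it (note r600 leaves dp_funcs unset). The same dispatch shape in miniature; the types, hooks, and the 0 fallback value are stand-ins:

    #include <stdint.h>
    #include <stdio.h>

    struct audio_funcs {
        uint32_t (*endpoint_rreg)(uint32_t offset, uint32_t reg);
    };

    static uint32_t dce6_rreg(uint32_t off, uint32_t reg) { return off + reg; }

    /* One table per generation; older parts leave hooks NULL, like
     * r600 leaving dp_funcs unset in the hits above. */
    static const struct audio_funcs dce6_tbl = { dce6_rreg };
    static const struct audio_funcs r600_tbl = { NULL };

    static uint32_t endpoint_rreg(const struct audio_funcs *f,
                                  uint32_t off, uint32_t reg)
    {
        if (f->endpoint_rreg)   /* guarded indirect call */
            return f->endpoint_rreg(off, reg);
        return 0;               /* assumed fallback when the hook is absent */
    }

    int main(void)
    {
        int is_dce6 = 1;        /* stand-in for ASIC_IS_DCE6(rdev) */
        const struct audio_funcs *f = is_dce6 ? &dce6_tbl : &r600_tbl;
        printf("%u\n", endpoint_rreg(f, 0x10, 0x4));
        return 0;
    }
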
H A D radeon_ttm.c 50 static int radeon_ttm_debugfs_init(struct radeon_device *rdev);
51 static void radeon_ttm_debugfs_fini(struct radeon_device *rdev);
56 struct radeon_device *rdev; radeon_get_rdev() local
59 rdev = container_of(mman, struct radeon_device, mman); radeon_get_rdev()
60 return rdev; radeon_get_rdev()
77 static int radeon_ttm_global_init(struct radeon_device *rdev) radeon_ttm_global_init() argument
82 rdev->mman.mem_global_referenced = false; radeon_ttm_global_init()
83 global_ref = &rdev->mman.mem_global_ref; radeon_ttm_global_init()
95 rdev->mman.bo_global_ref.mem_glob = radeon_ttm_global_init()
96 rdev->mman.mem_global_ref.object; radeon_ttm_global_init()
97 global_ref = &rdev->mman.bo_global_ref.ref; radeon_ttm_global_init()
105 drm_global_item_unref(&rdev->mman.mem_global_ref); radeon_ttm_global_init()
109 rdev->mman.mem_global_referenced = true; radeon_ttm_global_init()
113 static void radeon_ttm_global_fini(struct radeon_device *rdev) radeon_ttm_global_fini() argument
115 if (rdev->mman.mem_global_referenced) { radeon_ttm_global_fini()
116 drm_global_item_unref(&rdev->mman.bo_global_ref.ref); radeon_ttm_global_fini()
117 drm_global_item_unref(&rdev->mman.mem_global_ref); radeon_ttm_global_fini()
118 rdev->mman.mem_global_referenced = false; radeon_ttm_global_fini()
130 struct radeon_device *rdev; radeon_init_mem_type() local
132 rdev = radeon_get_rdev(bdev); radeon_init_mem_type()
143 man->gpu_offset = rdev->mc.gtt_start; radeon_init_mem_type()
148 if (rdev->flags & RADEON_IS_AGP) { radeon_init_mem_type()
149 if (!rdev->ddev->agp) { radeon_init_mem_type()
154 if (!rdev->ddev->agp->cant_use_aperture) radeon_init_mem_type()
165 man->gpu_offset = rdev->mc.vram_start; radeon_init_mem_type()
199 if (rbo->rdev->ring[radeon_copy_ring_index(rbo->rdev)].ready == false) radeon_evict_flags()
201 else if (rbo->rdev->mc.visible_vram_size < rbo->rdev->mc.real_vram_size && radeon_evict_flags()
202 bo->mem.start < (rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT)) { radeon_evict_flags()
203 unsigned fpfn = rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT; radeon_evict_flags()
258 struct radeon_device *rdev; radeon_move_blit() local
264 rdev = radeon_get_rdev(bo->bdev); radeon_move_blit()
265 ridx = radeon_copy_ring_index(rdev); radeon_move_blit()
271 old_start += rdev->mc.vram_start; radeon_move_blit()
274 old_start += rdev->mc.gtt_start; radeon_move_blit()
282 new_start += rdev->mc.vram_start; radeon_move_blit()
285 new_start += rdev->mc.gtt_start; radeon_move_blit()
291 if (!rdev->ring[ridx].ready) { radeon_move_blit()
299 fence = radeon_copy(rdev, old_start, new_start, num_pages, bo->resv); radeon_move_blit()
314 struct radeon_device *rdev; radeon_move_vram_ram() local
321 rdev = radeon_get_rdev(bo->bdev); radeon_move_vram_ram()
361 struct radeon_device *rdev; radeon_move_ram_vram() local
368 rdev = radeon_get_rdev(bo->bdev); radeon_move_ram_vram()
401 struct radeon_device *rdev; radeon_bo_move() local
405 rdev = radeon_get_rdev(bo->bdev); radeon_bo_move()
418 if (!rdev->ring[radeon_copy_ring_index(rdev)].ready || radeon_bo_move()
419 rdev->asic->copy.copy == NULL) { radeon_bo_move()
445 atomic64_add((u64)bo->num_pages << PAGE_SHIFT, &rdev->num_bytes_moved); radeon_bo_move()
452 struct radeon_device *rdev = radeon_get_rdev(bdev); radeon_ttm_io_mem_reserve() local
467 if (rdev->flags & RADEON_IS_AGP) { radeon_ttm_io_mem_reserve()
470 mem->bus.base = rdev->mc.agp_base; radeon_ttm_io_mem_reserve()
471 mem->bus.is_iomem = !rdev->ddev->agp->cant_use_aperture; radeon_ttm_io_mem_reserve()
478 if ((mem->bus.offset + mem->bus.size) > rdev->mc.visible_vram_size) radeon_ttm_io_mem_reserve()
480 mem->bus.base = rdev->mc.aper_base; radeon_ttm_io_mem_reserve()
503 rdev->ddev->hose->dense_mem_base; radeon_ttm_io_mem_reserve()
521 struct radeon_device *rdev; member in struct:radeon_ttm_tt
532 struct radeon_device *rdev = radeon_get_rdev(ttm->bdev); radeon_ttm_tt_pin_userptr() local
575 nents = dma_map_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction); radeon_ttm_tt_pin_userptr()
594 struct radeon_device *rdev = radeon_get_rdev(ttm->bdev); radeon_ttm_tt_unpin_userptr() local
607 dma_unmap_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction); radeon_ttm_tt_unpin_userptr()
641 r = radeon_gart_bind(gtt->rdev, gtt->offset, ttm->num_pages, radeon_ttm_backend_bind()
655 radeon_gart_unbind(gtt->rdev, gtt->offset, ttm->num_pages); radeon_ttm_backend_unbind()
681 struct radeon_device *rdev; radeon_ttm_tt_create() local
684 rdev = radeon_get_rdev(bdev); radeon_ttm_tt_create()
686 if (rdev->flags & RADEON_IS_AGP) { radeon_ttm_tt_create()
687 return ttm_agp_tt_create(bdev, rdev->ddev->agp->bridge, radeon_ttm_tt_create()
697 gtt->rdev = rdev; radeon_ttm_tt_create()
715 struct radeon_device *rdev; radeon_ttm_tt_populate() local
740 rdev = radeon_get_rdev(ttm->bdev); radeon_ttm_tt_populate()
742 if (rdev->flags & RADEON_IS_AGP) { radeon_ttm_tt_populate()
749 return ttm_dma_populate(&gtt->ttm, rdev->dev); radeon_ttm_tt_populate()
759 gtt->ttm.dma_address[i] = pci_map_page(rdev->pdev, ttm->pages[i], radeon_ttm_tt_populate()
762 if (pci_dma_mapping_error(rdev->pdev, gtt->ttm.dma_address[i])) { radeon_ttm_tt_populate()
764 pci_unmap_page(rdev->pdev, gtt->ttm.dma_address[i], radeon_ttm_tt_populate()
777 struct radeon_device *rdev; radeon_ttm_tt_unpopulate() local
791 rdev = radeon_get_rdev(ttm->bdev); radeon_ttm_tt_unpopulate()
793 if (rdev->flags & RADEON_IS_AGP) { radeon_ttm_tt_unpopulate()
801 ttm_dma_unpopulate(&gtt->ttm, rdev->dev); radeon_ttm_tt_unpopulate()
808 pci_unmap_page(rdev->pdev, gtt->ttm.dma_address[i], radeon_ttm_tt_unpopulate()
865 int radeon_ttm_init(struct radeon_device *rdev) radeon_ttm_init() argument
869 r = radeon_ttm_global_init(rdev); radeon_ttm_init()
874 r = ttm_bo_device_init(&rdev->mman.bdev, radeon_ttm_init()
875 rdev->mman.bo_global_ref.ref.object, radeon_ttm_init()
877 rdev->ddev->anon_inode->i_mapping, radeon_ttm_init()
879 rdev->need_dma32); radeon_ttm_init()
884 rdev->mman.initialized = true; radeon_ttm_init()
885 r = ttm_bo_init_mm(&rdev->mman.bdev, TTM_PL_VRAM, radeon_ttm_init()
886 rdev->mc.real_vram_size >> PAGE_SHIFT); radeon_ttm_init()
892 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size); radeon_ttm_init()
894 r = radeon_bo_create(rdev, 256 * 1024, PAGE_SIZE, true, radeon_ttm_init()
896 NULL, &rdev->stollen_vga_memory); radeon_ttm_init()
900 r = radeon_bo_reserve(rdev->stollen_vga_memory, false); radeon_ttm_init()
903 r = radeon_bo_pin(rdev->stollen_vga_memory, RADEON_GEM_DOMAIN_VRAM, NULL); radeon_ttm_init()
904 radeon_bo_unreserve(rdev->stollen_vga_memory); radeon_ttm_init()
906 radeon_bo_unref(&rdev->stollen_vga_memory); radeon_ttm_init()
910 (unsigned) (rdev->mc.real_vram_size / (1024 * 1024))); radeon_ttm_init()
911 r = ttm_bo_init_mm(&rdev->mman.bdev, TTM_PL_TT, radeon_ttm_init()
912 rdev->mc.gtt_size >> PAGE_SHIFT); radeon_ttm_init()
918 (unsigned)(rdev->mc.gtt_size / (1024 * 1024))); radeon_ttm_init()
920 r = radeon_ttm_debugfs_init(rdev); radeon_ttm_init()
928 void radeon_ttm_fini(struct radeon_device *rdev) radeon_ttm_fini() argument
932 if (!rdev->mman.initialized) radeon_ttm_fini()
934 radeon_ttm_debugfs_fini(rdev); radeon_ttm_fini()
935 if (rdev->stollen_vga_memory) { radeon_ttm_fini()
936 r = radeon_bo_reserve(rdev->stollen_vga_memory, false); radeon_ttm_fini()
938 radeon_bo_unpin(rdev->stollen_vga_memory); radeon_ttm_fini()
939 radeon_bo_unreserve(rdev->stollen_vga_memory); radeon_ttm_fini()
941 radeon_bo_unref(&rdev->stollen_vga_memory); radeon_ttm_fini()
943 ttm_bo_clean_mm(&rdev->mman.bdev, TTM_PL_VRAM); radeon_ttm_fini()
944 ttm_bo_clean_mm(&rdev->mman.bdev, TTM_PL_TT); radeon_ttm_fini()
945 ttm_bo_device_release(&rdev->mman.bdev); radeon_ttm_fini()
946 radeon_gart_fini(rdev); radeon_ttm_fini()
947 radeon_ttm_global_fini(rdev); radeon_ttm_fini()
948 rdev->mman.initialized = false; radeon_ttm_fini()
954 void radeon_ttm_set_active_vram_size(struct radeon_device *rdev, u64 size) radeon_ttm_set_active_vram_size() argument
958 if (!rdev->mman.initialized) radeon_ttm_set_active_vram_size()
961 man = &rdev->mman.bdev.man[TTM_PL_VRAM]; radeon_ttm_set_active_vram_size()
972 struct radeon_device *rdev; radeon_ttm_fault() local
979 rdev = radeon_get_rdev(bo->bdev); radeon_ttm_fault()
980 down_read(&rdev->pm.mclk_lock); radeon_ttm_fault()
982 up_read(&rdev->pm.mclk_lock); radeon_ttm_fault()
989 struct radeon_device *rdev; radeon_mmap() local
997 rdev = file_priv->minor->dev->dev_private; radeon_mmap()
998 if (rdev == NULL) { radeon_mmap()
1001 r = ttm_bo_mmap(filp, vma, &rdev->mman.bdev); radeon_mmap()
1021 struct radeon_device *rdev = dev->dev_private; radeon_mm_dump_table() local
1022 struct drm_mm *mm = (struct drm_mm *)rdev->mman.bdev.man[ttm_pl].priv; radeon_mm_dump_table()
1024 struct ttm_bo_global *glob = rdev->mman.bdev.glob; radeon_mm_dump_table()
1046 struct radeon_device *rdev = inode->i_private; radeon_ttm_vram_open() local
1047 i_size_write(inode, rdev->mc.mc_vram_size); radeon_ttm_vram_open()
1055 struct radeon_device *rdev = f->private_data; radeon_ttm_vram_read() local
1066 if (*pos >= rdev->mc.mc_vram_size) radeon_ttm_vram_read()
1069 spin_lock_irqsave(&rdev->mmio_idx_lock, flags); radeon_ttm_vram_read()
1071 if (rdev->family >= CHIP_CEDAR) radeon_ttm_vram_read()
1074 spin_unlock_irqrestore(&rdev->mmio_idx_lock, flags); radeon_ttm_vram_read()
1098 struct radeon_device *rdev = inode->i_private; radeon_ttm_gtt_open() local
1099 i_size_write(inode, rdev->mc.gtt_size); radeon_ttm_gtt_open()
1107 struct radeon_device *rdev = f->private_data; radeon_ttm_gtt_read() local
1118 if (p >= rdev->gart.num_cpu_pages) radeon_ttm_gtt_read()
1121 page = rdev->gart.pages[p]; radeon_ttm_gtt_read()
1127 kunmap(rdev->gart.pages[p]); radeon_ttm_gtt_read()
1152 static int radeon_ttm_debugfs_init(struct radeon_device *rdev) radeon_ttm_debugfs_init() argument
1157 struct drm_minor *minor = rdev->ddev->primary; radeon_ttm_debugfs_init()
1161 rdev, &radeon_ttm_vram_fops); radeon_ttm_debugfs_init()
1164 rdev->mman.vram = ent; radeon_ttm_debugfs_init()
1167 rdev, &radeon_ttm_gtt_fops); radeon_ttm_debugfs_init()
1170 rdev->mman.gtt = ent; radeon_ttm_debugfs_init()
1179 return radeon_debugfs_add_files(rdev, radeon_ttm_debugfs_list, count); radeon_ttm_debugfs_init()
1186 static void radeon_ttm_debugfs_fini(struct radeon_device *rdev) radeon_ttm_debugfs_fini() argument
1190 debugfs_remove(rdev->mman.vram); radeon_ttm_debugfs_fini()
1191 rdev->mman.vram = NULL; radeon_ttm_debugfs_fini()
1193 debugfs_remove(rdev->mman.gtt); radeon_ttm_debugfs_fini()
1194 rdev->mman.gtt = NULL; radeon_ttm_debugfs_fini()
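
radeon_ttm_tt_populate() maps each page with pci_map_page() and, if one mapping fails, walks back over the pages already mapped and unmaps them before returning. A generic sketch of that map-or-unwind loop with stubbed map/unmap functions:

    #include <stdio.h>

    /* Stubbed pci_map_page()/pci_unmap_page(); mapping "fails" at i == 3. */
    static int map_one(int i, long *dma)
    {
        if (i == 3)
            return -1;
        *dma = 0x1000 + i;
        return 0;
    }
    static void unmap_one(int i, long dma) { (void)dma; printf("unmapped %d\n", i); }

    /* Map pages 0..n-1; on the first failure unwind everything already
     * mapped, as radeon_ttm_tt_populate() does in the hits above. */
    static int map_all(long *dma, int n)
    {
        for (int i = 0; i < n; i++) {
            if (map_one(i, &dma[i])) {
                while (--i >= 0)
                    unmap_one(i, dma[i]);
                return -1;
            }
        }
        return 0;
    }

    int main(void)
    {
        long dma[8];
        return map_all(dma, 8) ? 1 : 0;
    }
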
H A D cypress_dpm.h 113 int cypress_convert_power_level_to_smc(struct radeon_device *rdev,
117 int cypress_populate_smc_acpi_state(struct radeon_device *rdev,
119 int cypress_populate_smc_voltage_tables(struct radeon_device *rdev,
121 int cypress_populate_smc_initial_state(struct radeon_device *rdev,
124 u32 cypress_calculate_burst_time(struct radeon_device *rdev,
126 void cypress_notify_link_speed_change_before_state_change(struct radeon_device *rdev,
129 int cypress_upload_sw_state(struct radeon_device *rdev,
131 int cypress_upload_mc_reg_table(struct radeon_device *rdev,
133 void cypress_program_memory_timing_parameters(struct radeon_device *rdev,
135 void cypress_notify_link_speed_change_after_state_change(struct radeon_device *rdev,
138 int cypress_construct_voltage_tables(struct radeon_device *rdev);
139 int cypress_get_mvdd_configuration(struct radeon_device *rdev);
140 void cypress_enable_spread_spectrum(struct radeon_device *rdev,
142 void cypress_enable_display_gap(struct radeon_device *rdev);
143 int cypress_get_table_locations(struct radeon_device *rdev);
144 int cypress_populate_mc_reg_table(struct radeon_device *rdev,
146 void cypress_program_response_times(struct radeon_device *rdev);
147 int cypress_notify_smc_display_change(struct radeon_device *rdev,
149 void cypress_enable_sclk_control(struct radeon_device *rdev,
151 void cypress_enable_mclk_control(struct radeon_device *rdev,
153 void cypress_start_dpm(struct radeon_device *rdev);
154 void cypress_advertise_gen2_capability(struct radeon_device *rdev);
155 u32 cypress_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf);
156 u8 cypress_get_mclk_frequency_ratio(struct radeon_device *rdev,
158 u8 cypress_get_strobe_mode_settings(struct radeon_device *rdev, u32 mclk);
H A D sumo_smc.c 34 struct sumo_power_info *sumo_get_pi(struct radeon_device *rdev);
36 static void sumo_send_msg_to_smu(struct radeon_device *rdev, u32 id) sumo_send_msg_to_smu() argument
41 for (i = 0; i < rdev->usec_timeout; i++) { sumo_send_msg_to_smu()
50 for (i = 0; i < rdev->usec_timeout; i++) { sumo_send_msg_to_smu()
56 for (i = 0; i < rdev->usec_timeout; i++) { sumo_send_msg_to_smu()
62 for (i = 0; i < rdev->usec_timeout; i++) { sumo_send_msg_to_smu()
72 void sumo_initialize_m3_arb(struct radeon_device *rdev) sumo_initialize_m3_arb() argument
74 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_initialize_m3_arb()
93 static bool sumo_is_alt_vddnb_supported(struct radeon_device *rdev) sumo_is_alt_vddnb_supported() argument
95 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_is_alt_vddnb_supported()
101 if ((rdev->family == CHIP_SUMO) || (rdev->family == CHIP_SUMO2)) { sumo_is_alt_vddnb_supported()
109 void sumo_smu_notify_alt_vddnb_change(struct radeon_device *rdev, sumo_smu_notify_alt_vddnb_change() argument
114 if (!sumo_is_alt_vddnb_supported(rdev)) sumo_smu_notify_alt_vddnb_change()
125 sumo_send_msg_to_smu(rdev, SUMO_SMU_SERVICE_ROUTINE_ALTVDDNB_NOTIFY); sumo_smu_notify_alt_vddnb_change()
128 void sumo_smu_pg_init(struct radeon_device *rdev) sumo_smu_pg_init() argument
130 sumo_send_msg_to_smu(rdev, SUMO_SMU_SERVICE_ROUTINE_PG_INIT); sumo_smu_pg_init()
144 void sumo_enable_boost_timer(struct radeon_device *rdev) sumo_enable_boost_timer() argument
146 struct sumo_power_info *pi = sumo_get_pi(rdev); sumo_enable_boost_timer()
148 u32 xclk = radeon_get_xclk(rdev); sumo_enable_boost_timer()
163 sumo_send_msg_to_smu(rdev, SUMO_SMU_SERVICE_ROUTINE_GFX_SRV_ID_20); sumo_enable_boost_timer()
166 void sumo_set_tdp_limit(struct radeon_device *rdev, u32 index, u32 tdp_limit) sumo_set_tdp_limit() argument
208 void sumo_boost_state_enable(struct radeon_device *rdev, bool enable) sumo_boost_state_enable() argument
217 u32 sumo_get_running_fw_version(struct radeon_device *rdev) sumo_get_running_fw_version() argument
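
sumo_send_msg_to_smu() waits on hardware by polling a status condition in bounded loops capped at rdev->usec_timeout iterations. The shape of one such loop, with a stubbed condition and delay:

    #include <stdio.h>

    static int countdown = 5;                      /* fake status bit */
    static int status_set(void) { return --countdown <= 0; }
    static void udelay_1us(void) { /* busy-wait placeholder */ }

    /* Poll a condition for at most usec_timeout iterations -- the
     * "for (i = 0; i < rdev->usec_timeout; i++)" loops above. */
    static int poll_for(int (*cond)(void), int usec_timeout)
    {
        for (int i = 0; i < usec_timeout; i++) {
            if (cond())
                return 0;
            udelay_1us();
        }
        return -1;                                 /* timed out */
    }

    int main(void) { return poll_for(status_set, 100) ? 1 : 0; }
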
H A D si_smc.c 33 static int si_set_smc_sram_address(struct radeon_device *rdev, si_set_smc_sram_address() argument
47 int si_copy_bytes_to_smc(struct radeon_device *rdev, si_copy_bytes_to_smc() argument
62 spin_lock_irqsave(&rdev->smc_idx_lock, flags); si_copy_bytes_to_smc()
67 ret = si_set_smc_sram_address(rdev, addr, limit); si_copy_bytes_to_smc()
82 ret = si_set_smc_sram_address(rdev, addr, limit); si_copy_bytes_to_smc()
100 ret = si_set_smc_sram_address(rdev, addr, limit); si_copy_bytes_to_smc()
108 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); si_copy_bytes_to_smc()
113 void si_start_smc(struct radeon_device *rdev) si_start_smc() argument
122 void si_reset_smc(struct radeon_device *rdev) si_reset_smc() argument
136 int si_program_jump_on_start(struct radeon_device *rdev) si_program_jump_on_start() argument
140 return si_copy_bytes_to_smc(rdev, 0x0, data, 4, sizeof(data)+1); si_program_jump_on_start()
143 void si_stop_smc_clock(struct radeon_device *rdev) si_stop_smc_clock() argument
152 void si_start_smc_clock(struct radeon_device *rdev) si_start_smc_clock() argument
161 bool si_is_smc_running(struct radeon_device *rdev) si_is_smc_running() argument
172 PPSMC_Result si_send_msg_to_smc(struct radeon_device *rdev, PPSMC_Msg msg) si_send_msg_to_smc() argument
177 if (!si_is_smc_running(rdev)) si_send_msg_to_smc()
182 for (i = 0; i < rdev->usec_timeout; i++) { si_send_msg_to_smc()
193 PPSMC_Result si_wait_for_smc_inactive(struct radeon_device *rdev) si_wait_for_smc_inactive() argument
198 if (!si_is_smc_running(rdev)) si_wait_for_smc_inactive()
201 for (i = 0; i < rdev->usec_timeout; i++) { si_wait_for_smc_inactive()
211 int si_load_smc_ucode(struct radeon_device *rdev, u32 limit) si_load_smc_ucode() argument
219 if (!rdev->smc_fw) si_load_smc_ucode()
222 if (rdev->new_fw) { si_load_smc_ucode()
224 (const struct smc_firmware_header_v1_0 *)rdev->smc_fw->data; si_load_smc_ucode()
231 (rdev->smc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); si_load_smc_ucode()
233 switch (rdev->family) { si_load_smc_ucode()
258 src = (const u8 *)rdev->smc_fw->data; si_load_smc_ucode()
264 spin_lock_irqsave(&rdev->smc_idx_lock, flags); si_load_smc_ucode()
277 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); si_load_smc_ucode()
282 int si_read_smc_sram_dword(struct radeon_device *rdev, u32 smc_address, si_read_smc_sram_dword() argument
288 spin_lock_irqsave(&rdev->smc_idx_lock, flags); si_read_smc_sram_dword()
289 ret = si_set_smc_sram_address(rdev, smc_address, limit); si_read_smc_sram_dword()
292 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); si_read_smc_sram_dword()
297 int si_write_smc_sram_dword(struct radeon_device *rdev, u32 smc_address, si_write_smc_sram_dword() argument
303 spin_lock_irqsave(&rdev->smc_idx_lock, flags); si_write_smc_sram_dword()
304 ret = si_set_smc_sram_address(rdev, smc_address, limit); si_write_smc_sram_dword()
307 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); si_write_smc_sram_dword()
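
si_copy_bytes_to_smc() streams 32-bit words into SMC SRAM and must special-case a tail shorter than four bytes: re-address the window, read the existing dword, merge in only the leftover bytes, and write it back. A sketch of just that merge step; the big-endian packing is an assumption about the SMC window, not something these hits show:

    #include <stdint.h>
    #include <stdio.h>

    /* Merge a short tail (count in 1..3) into an existing 32-bit word,
     * preserving the untouched bytes -- what si_copy_bytes_to_smc()
     * needs when the buffer length isn't a multiple of 4. Big-endian
     * packing assumed. */
    static uint32_t merge_tail(uint32_t original, const uint8_t *src, int count)
    {
        uint32_t data = 0, mask = 0xffffffffu >> (8 * count);

        for (int i = 0; i < count; i++)
            data = (data << 8) | src[i];
        data <<= 8 * (4 - count);        /* left-align the tail bytes */
        return (original & mask) | data; /* keep the trailing bytes   */
    }

    int main(void)
    {
        const uint8_t tail[2] = { 0xAA, 0xBB };
        printf("%08x\n", merge_tail(0x11223344, tail, 2)); /* aabb3344 */
        return 0;
    }
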
H A D ci_smc.c 33 static int ci_set_smc_sram_address(struct radeon_device *rdev, ci_set_smc_sram_address() argument
47 int ci_copy_bytes_to_smc(struct radeon_device *rdev, ci_copy_bytes_to_smc() argument
64 spin_lock_irqsave(&rdev->smc_idx_lock, flags); ci_copy_bytes_to_smc()
69 ret = ci_set_smc_sram_address(rdev, addr, limit); ci_copy_bytes_to_smc()
84 ret = ci_set_smc_sram_address(rdev, addr, limit); ci_copy_bytes_to_smc()
101 ret = ci_set_smc_sram_address(rdev, addr, limit); ci_copy_bytes_to_smc()
109 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); ci_copy_bytes_to_smc()
114 void ci_start_smc(struct radeon_device *rdev) ci_start_smc() argument
122 void ci_reset_smc(struct radeon_device *rdev) ci_reset_smc() argument
130 int ci_program_jump_on_start(struct radeon_device *rdev) ci_program_jump_on_start() argument
134 return ci_copy_bytes_to_smc(rdev, 0x0, data, 4, sizeof(data)+1); ci_program_jump_on_start()
137 void ci_stop_smc_clock(struct radeon_device *rdev) ci_stop_smc_clock() argument
146 void ci_start_smc_clock(struct radeon_device *rdev) ci_start_smc_clock() argument
155 bool ci_is_smc_running(struct radeon_device *rdev) ci_is_smc_running() argument
166 PPSMC_Result ci_send_msg_to_smc(struct radeon_device *rdev, PPSMC_Msg msg) ci_send_msg_to_smc() argument
171 if (!ci_is_smc_running(rdev)) ci_send_msg_to_smc()
176 for (i = 0; i < rdev->usec_timeout; i++) { ci_send_msg_to_smc()
188 PPSMC_Result ci_wait_for_smc_inactive(struct radeon_device *rdev)
193 if (!ci_is_smc_running(rdev))
196 for (i = 0; i < rdev->usec_timeout; i++) {
207 int ci_load_smc_ucode(struct radeon_device *rdev, u32 limit) ci_load_smc_ucode() argument
215 if (!rdev->smc_fw) ci_load_smc_ucode()
218 if (rdev->new_fw) { ci_load_smc_ucode()
220 (const struct smc_firmware_header_v1_0 *)rdev->smc_fw->data; ci_load_smc_ucode()
227 (rdev->smc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); ci_load_smc_ucode()
229 switch (rdev->family) { ci_load_smc_ucode()
243 src = (const u8 *)rdev->smc_fw->data; ci_load_smc_ucode()
249 spin_lock_irqsave(&rdev->smc_idx_lock, flags); ci_load_smc_ucode()
262 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); ci_load_smc_ucode()
267 int ci_read_smc_sram_dword(struct radeon_device *rdev, ci_read_smc_sram_dword() argument
273 spin_lock_irqsave(&rdev->smc_idx_lock, flags); ci_read_smc_sram_dword()
274 ret = ci_set_smc_sram_address(rdev, smc_address, limit); ci_read_smc_sram_dword()
277 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); ci_read_smc_sram_dword()
282 int ci_write_smc_sram_dword(struct radeon_device *rdev, ci_write_smc_sram_dword() argument
288 spin_lock_irqsave(&rdev->smc_idx_lock, flags); ci_write_smc_sram_dword()
289 ret = ci_set_smc_sram_address(rdev, smc_address, limit); ci_write_smc_sram_dword()
292 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); ci_write_smc_sram_dword()
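
si_smc.c and ci_smc.c both guard SRAM access with smc_idx_lock around a two-step sequence: program an index (set_smc_sram_address), then touch the data register. The lock keeps the pair atomic against concurrent users. The same index/data-register idiom modeled in user space, with a mutex in place of spin_lock_irqsave():

    /* link with -lpthread */
    #include <pthread.h>
    #include <stdint.h>
    #include <stdio.h>

    /* A fake index/data register pair; writing the index selects which
     * cell the data register accesses, so the two steps must be atomic
     * against other users -- the job smc_idx_lock does above. */
    static uint32_t sram[16];
    static uint32_t index_reg;
    static pthread_mutex_t idx_lock = PTHREAD_MUTEX_INITIALIZER;

    static void write_dword(uint32_t addr, uint32_t val)
    {
        pthread_mutex_lock(&idx_lock);  /* spin_lock_irqsave() stand-in */
        index_reg = addr;               /* "set_smc_sram_address" step  */
        sram[index_reg % 16] = val;     /* data register write          */
        pthread_mutex_unlock(&idx_lock);
    }

    static uint32_t read_dword(uint32_t addr)
    {
        uint32_t v;
        pthread_mutex_lock(&idx_lock);
        index_reg = addr;
        v = sram[index_reg % 16];
        pthread_mutex_unlock(&idx_lock);
        return v;
    }

    int main(void)
    {
        write_dword(5, 0xdeadbeef);
        printf("%08x\n", read_dword(5));
        return 0;
    }
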
H A D radeon_fb.c 48 struct radeon_device *rdev; member in struct:radeon_fbdev
66 int radeon_align_pitch(struct radeon_device *rdev, int width, int bpp, bool tiled) radeon_align_pitch() argument
69 int align_large = (ASIC_IS_AVIVO(rdev)) || tiled; radeon_align_pitch()
108 struct radeon_device *rdev = rfbdev->rdev; radeonfb_create_pinned_object() local
121 mode_cmd->pitches[0] = radeon_align_pitch(rdev, mode_cmd->width, bpp, radeonfb_create_pinned_object()
124 if (rdev->family >= CHIP_R600) radeonfb_create_pinned_object()
128 ret = radeon_gem_object_create(rdev, aligned_size, 0, radeonfb_create_pinned_object()
158 dev_err(rdev->dev, "FB failed to set tiling flags\n"); radeonfb_create_pinned_object()
167 ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27, radeonfb_create_pinned_object()
194 struct radeon_device *rdev = rfbdev->rdev; radeonfb_create() local
207 if ((sizes->surface_bpp == 24) && ASIC_IS_AVIVO(rdev)) radeonfb_create()
231 ret = radeon_framebuffer_init(rdev->ddev, &rfbdev->rfb, &mode_cmd, gobj); radeonfb_create()
251 tmp = radeon_bo_gpu_offset(rbo) - rdev->mc.vram_start; radeonfb_create()
252 info->fix.smem_start = rdev->mc.aper_base + tmp; radeonfb_create()
260 info->apertures->ranges[0].base = rdev->ddev->mode_config.fb_base; radeonfb_create()
261 info->apertures->ranges[0].size = rdev->mc.aper_size; radeonfb_create()
271 DRM_INFO("vram apper at 0x%lX\n", (unsigned long)rdev->mc.aper_base); radeonfb_create()
276 vga_switcheroo_client_fb_set(rdev->ddev->pdev, info); radeonfb_create()
294 void radeon_fb_output_poll_changed(struct radeon_device *rdev) radeon_fb_output_poll_changed() argument
296 drm_fb_helper_hotplug_event(&rdev->mode_info.rfbdev->helper); radeon_fb_output_poll_changed()
323 int radeon_fbdev_init(struct radeon_device *rdev) radeon_fbdev_init() argument
330 if (ASIC_IS_RN50(rdev) || rdev->mc.real_vram_size <= (32*1024*1024)) radeon_fbdev_init()
337 rfbdev->rdev = rdev; radeon_fbdev_init()
338 rdev->mode_info.rfbdev = rfbdev; radeon_fbdev_init()
340 drm_fb_helper_prepare(rdev->ddev, &rfbdev->helper, radeon_fbdev_init()
343 ret = drm_fb_helper_init(rdev->ddev, &rfbdev->helper, radeon_fbdev_init()
344 rdev->num_crtc, radeon_fbdev_init()
354 drm_helper_disable_unused_functions(rdev->ddev); radeon_fbdev_init()
369 void radeon_fbdev_fini(struct radeon_device *rdev) radeon_fbdev_fini() argument
371 if (!rdev->mode_info.rfbdev) radeon_fbdev_fini()
374 radeon_fbdev_destroy(rdev->ddev, rdev->mode_info.rfbdev); radeon_fbdev_fini()
375 kfree(rdev->mode_info.rfbdev); radeon_fbdev_fini()
376 rdev->mode_info.rfbdev = NULL; radeon_fbdev_fini()
379 void radeon_fbdev_set_suspend(struct radeon_device *rdev, int state) radeon_fbdev_set_suspend() argument
381 fb_set_suspend(rdev->mode_info.rfbdev->helper.fbdev, state); radeon_fbdev_set_suspend()
384 bool radeon_fbdev_robj_is_fb(struct radeon_device *rdev, struct radeon_bo *robj) radeon_fbdev_robj_is_fb() argument
386 if (robj == gem_to_radeon_bo(rdev->mode_info.rfbdev->rfb.obj)) radeon_fbdev_robj_is_fb()
391 void radeon_fb_add_connector(struct radeon_device *rdev, struct drm_connector *connector) radeon_fb_add_connector() argument
393 drm_fb_helper_add_one_connector(&rdev->mode_info.rfbdev->helper, connector); radeon_fb_add_connector()
396 void radeon_fb_remove_connector(struct radeon_device *rdev, struct drm_connector *connector) radeon_fb_remove_connector() argument
398 drm_fb_helper_remove_one_connector(&rdev->mode_info.rfbdev->helper, connector); radeon_fb_remove_connector()
401 void radeon_fbdev_restore_mode(struct radeon_device *rdev) radeon_fbdev_restore_mode() argument
403 struct radeon_fbdev *rfbdev = rdev->mode_info.rfbdev; radeon_fbdev_restore_mode()
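
radeonfb_create_pinned_object() sizes its buffer from radeon_align_pitch(), which rounds the scanline width up to a hardware alignment, stricter when ASIC_IS_AVIVO(rdev) or the buffer is tiled (the align_large flag). A sketch of that rounding; the 64/32-pixel alignment constants are illustrative assumptions, as the hits don't show the driver's actual values:

    #include <stdio.h>

    /* Round a scanline pitch up to a hardware requirement; align_large
     * picks the stricter case, as "align_large = ASIC_IS_AVIVO(rdev)
     * || tiled" does above. Alignment values are assumed. */
    static int align_pitch(int width, int bpp, int align_large)
    {
        int pixel_align = align_large ? 64 : 32;
        int aligned_w = (width + pixel_align - 1) & ~(pixel_align - 1);
        return aligned_w * ((bpp + 7) / 8);        /* bytes per line */
    }

    int main(void)
    {
        printf("%d\n", align_pitch(1366, 32, 1));  /* 1408 px * 4 B = 5632 */
        return 0;
    }
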
H A D radeon_object.c 41 int radeon_ttm_init(struct radeon_device *rdev);
42 void radeon_ttm_fini(struct radeon_device *rdev);
53 struct radeon_device *rdev = bo->rdev; radeon_update_memory_usage() local
59 atomic64_add(size, &rdev->gtt_usage); radeon_update_memory_usage()
61 atomic64_sub(size, &rdev->gtt_usage); radeon_update_memory_usage()
65 atomic64_add(size, &rdev->vram_usage); radeon_update_memory_usage()
67 atomic64_sub(size, &rdev->vram_usage); radeon_update_memory_usage()
80 mutex_lock(&bo->rdev->gem.mutex); radeon_ttm_bo_destroy()
82 mutex_unlock(&bo->rdev->gem.mutex); radeon_ttm_bo_destroy()
107 rbo->rdev->mc.visible_vram_size < rbo->rdev->mc.real_vram_size) { radeon_ttm_placement_from_domain()
109 rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT; radeon_ttm_placement_from_domain()
128 (rbo->rdev->flags & RADEON_IS_AGP)) { radeon_ttm_placement_from_domain()
147 rbo->rdev->flags & RADEON_IS_AGP) { radeon_ttm_placement_from_domain()
172 rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT; radeon_ttm_placement_from_domain()
178 int radeon_bo_create(struct radeon_device *rdev, radeon_bo_create() argument
201 acc_size = ttm_bo_dma_acc_size(&rdev->mman.bdev, size, radeon_bo_create()
207 r = drm_gem_object_init(rdev->ddev, &bo->gem_base, size); radeon_bo_create()
212 bo->rdev = rdev; radeon_bo_create()
222 if (!(rdev->flags & RADEON_IS_PCIE)) radeon_bo_create()
228 if (rdev->family >= CHIP_RV610 && rdev->family <= CHIP_RV635) radeon_bo_create()
259 down_read(&rdev->pm.mclk_lock); radeon_bo_create()
260 r = ttm_bo_init(&rdev->mman.bdev, &bo->tbo, size, type, radeon_bo_create()
263 up_read(&rdev->pm.mclk_lock); radeon_bo_create()
318 struct radeon_device *rdev; radeon_bo_unref() local
322 rdev = (*bo)->rdev; radeon_bo_unref()
346 domain_start = bo->rdev->mc.vram_start; radeon_bo_pin_restricted()
348 domain_start = bo->rdev->mc.gtt_start; radeon_bo_pin_restricted()
360 (!max_offset || max_offset > bo->rdev->mc.visible_vram_size)) radeon_bo_pin_restricted()
362 bo->rdev->mc.visible_vram_size >> PAGE_SHIFT; radeon_bo_pin_restricted()
375 bo->rdev->vram_pin_size += radeon_bo_size(bo); radeon_bo_pin_restricted()
377 bo->rdev->gart_pin_size += radeon_bo_size(bo); radeon_bo_pin_restricted()
379 dev_err(bo->rdev->dev, "%p pin failed\n", bo); radeon_bo_pin_restricted()
394 dev_warn(bo->rdev->dev, "%p unpin not necessary\n", bo); radeon_bo_unpin()
407 bo->rdev->vram_pin_size -= radeon_bo_size(bo); radeon_bo_unpin()
409 bo->rdev->gart_pin_size -= radeon_bo_size(bo); radeon_bo_unpin()
411 dev_err(bo->rdev->dev, "%p validate failed for unpin\n", bo); radeon_bo_unpin()
416 int radeon_bo_evict_vram(struct radeon_device *rdev) radeon_bo_evict_vram() argument
419 if (0 && (rdev->flags & RADEON_IS_IGP)) { radeon_bo_evict_vram()
420 if (rdev->mc.igp_sideport_enabled == false) radeon_bo_evict_vram()
424 return ttm_bo_evict_mm(&rdev->mman.bdev, TTM_PL_VRAM); radeon_bo_evict_vram()
427 void radeon_bo_force_delete(struct radeon_device *rdev) radeon_bo_force_delete() argument
431 if (list_empty(&rdev->gem.objects)) { radeon_bo_force_delete()
434 dev_err(rdev->dev, "Userspace still has active objects !\n"); radeon_bo_force_delete()
435 list_for_each_entry_safe(bo, n, &rdev->gem.objects, list) { radeon_bo_force_delete()
436 dev_err(rdev->dev, "%p %p %lu %lu force free\n", radeon_bo_force_delete()
439 mutex_lock(&bo->rdev->gem.mutex); radeon_bo_force_delete()
441 mutex_unlock(&bo->rdev->gem.mutex); radeon_bo_force_delete()
447 int radeon_bo_init(struct radeon_device *rdev) radeon_bo_init() argument
450 if (!rdev->fastfb_working) { radeon_bo_init()
451 rdev->mc.vram_mtrr = arch_phys_wc_add(rdev->mc.aper_base, radeon_bo_init()
452 rdev->mc.aper_size); radeon_bo_init()
455 rdev->mc.mc_vram_size >> 20, radeon_bo_init()
456 (unsigned long long)rdev->mc.aper_size >> 20); radeon_bo_init()
458 rdev->mc.vram_width, rdev->mc.vram_is_ddr ? 'D' : 'S'); radeon_bo_init()
459 return radeon_ttm_init(rdev); radeon_bo_init()
462 void radeon_bo_fini(struct radeon_device *rdev) radeon_bo_fini() argument
464 radeon_ttm_fini(rdev); radeon_bo_fini()
465 arch_phys_wc_del(rdev->mc.vram_mtrr); radeon_bo_fini()
470 static u64 radeon_bo_get_threshold_for_moves(struct radeon_device *rdev) radeon_bo_get_threshold_for_moves() argument
472 u64 real_vram_size = rdev->mc.real_vram_size; radeon_bo_get_threshold_for_moves()
473 u64 vram_usage = atomic64_read(&rdev->vram_usage); radeon_bo_get_threshold_for_moves()
520 int radeon_bo_list_validate(struct radeon_device *rdev, radeon_bo_list_validate() argument
528 u64 bytes_moved_threshold = radeon_bo_get_threshold_for_moves(rdev); radeon_bo_list_validate()
564 initial_bytes_moved = atomic64_read(&rdev->num_bytes_moved); list_for_each_entry()
566 bytes_moved += atomic64_read(&rdev->num_bytes_moved) - list_for_each_entry()
593 struct radeon_device *rdev = bo->rdev; radeon_bo_get_surface_reg() local
605 reg = &rdev->surface_regs[bo->surface_reg]; radeon_bo_get_surface_reg()
613 reg = &rdev->surface_regs[i]; radeon_bo_get_surface_reg()
627 reg = &rdev->surface_regs[steal]; radeon_bo_get_surface_reg()
640 radeon_set_surface_reg(rdev, i, bo->tiling_flags, bo->pitch, radeon_bo_get_surface_reg()
648 struct radeon_device *rdev = bo->rdev; radeon_bo_clear_surface_reg() local
654 reg = &rdev->surface_regs[bo->surface_reg]; radeon_bo_clear_surface_reg()
655 radeon_clear_surface_reg(rdev, bo->surface_reg); radeon_bo_clear_surface_reg()
664 struct radeon_device *rdev = bo->rdev; radeon_bo_set_tiling_flags() local
667 if (rdev->family >= CHIP_CEDAR) { radeon_bo_set_tiling_flags()
772 radeon_vm_bo_invalidate(rbo->rdev, rbo); radeon_bo_move_notify()
784 struct radeon_device *rdev; radeon_bo_fault_reserve_notify() local
793 rdev = rbo->rdev; radeon_bo_fault_reserve_notify()
799 if ((offset + size) <= rdev->mc.visible_vram_size) radeon_bo_fault_reserve_notify()
804 lpfn = rdev->mc.visible_vram_size >> PAGE_SHIFT; radeon_bo_fault_reserve_notify()
821 if ((offset + size) > rdev->mc.visible_vram_size) radeon_bo_fault_reserve_notify()
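
radeon_bo_fault_reserve_notify() above only forces a migration when a buffer extends past the CPU-visible aperture; the test reduces to a comparison against mc.visible_vram_size. A sketch of that check, with the TTM field names assumed from the 4.4 tree:

        /* BO offset and size inside VRAM, in bytes (TTM counts pages) */
        u64 offset = (u64)rbo->tbo.mem.start << PAGE_SHIFT;
        u64 size   = (u64)rbo->tbo.mem.num_pages << PAGE_SHIFT;

        if (offset + size <= rdev->mc.visible_vram_size)
                return 0;  /* fault can be served in place, no move needed */
        /* otherwise clamp lpfn to the visible window and revalidate */
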
H A D ni_dpm.c
721 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
722 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
724 extern int ni_mc_load_microcode(struct radeon_device *rdev);
726 struct ni_power_info *ni_get_pi(struct radeon_device *rdev) ni_get_pi() argument
728 struct ni_power_info *pi = rdev->pm.dpm.priv; ni_get_pi()
761 static void ni_calculate_leakage_for_v_and_t(struct radeon_device *rdev, ni_calculate_leakage_for_v_and_t() argument
771 bool ni_dpm_vblank_too_short(struct radeon_device *rdev) ni_dpm_vblank_too_short() argument
773 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_dpm_vblank_too_short()
774 u32 vblank_time = r600_dpm_get_vblank_time(rdev); ni_dpm_vblank_too_short()
785 static void ni_apply_state_adjust_rules(struct radeon_device *rdev, ni_apply_state_adjust_rules() argument
795 if ((rdev->pm.dpm.new_active_crtc_count > 1) || ni_apply_state_adjust_rules()
796 ni_dpm_vblank_too_short(rdev)) ni_apply_state_adjust_rules()
801 if (rdev->pm.dpm.ac_power) ni_apply_state_adjust_rules()
802 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac; ni_apply_state_adjust_rules()
804 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc; ni_apply_state_adjust_rules()
806 if (rdev->pm.dpm.ac_power == false) { ni_apply_state_adjust_rules()
829 btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk, ni_apply_state_adjust_rules()
864 btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk, ni_apply_state_adjust_rules()
869 btc_adjust_clock_combinations(rdev, max_limits, ni_apply_state_adjust_rules()
873 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk, ni_apply_state_adjust_rules()
876 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk, ni_apply_state_adjust_rules()
879 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk, ni_apply_state_adjust_rules()
882 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk, ni_apply_state_adjust_rules()
883 rdev->clock.current_dispclk, ni_apply_state_adjust_rules()
888 btc_apply_voltage_delta_rules(rdev, ni_apply_state_adjust_rules()
896 if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc) ni_apply_state_adjust_rules()
899 if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2) ni_apply_state_adjust_rules()
904 static void ni_cg_clockgating_default(struct radeon_device *rdev) ni_cg_clockgating_default() argument
912 btc_program_mgcg_hw_sequence(rdev, ps, count); ni_cg_clockgating_default()
915 static void ni_gfx_clockgating_enable(struct radeon_device *rdev, ni_gfx_clockgating_enable() argument
929 btc_program_mgcg_hw_sequence(rdev, ps, count); ni_gfx_clockgating_enable()
932 static void ni_mg_clockgating_default(struct radeon_device *rdev) ni_mg_clockgating_default() argument
940 btc_program_mgcg_hw_sequence(rdev, ps, count); ni_mg_clockgating_default()
943 static void ni_mg_clockgating_enable(struct radeon_device *rdev, ni_mg_clockgating_enable() argument
957 btc_program_mgcg_hw_sequence(rdev, ps, count); ni_mg_clockgating_enable()
960 static void ni_ls_clockgating_default(struct radeon_device *rdev) ni_ls_clockgating_default() argument
968 btc_program_mgcg_hw_sequence(rdev, ps, count); ni_ls_clockgating_default()
971 static void ni_ls_clockgating_enable(struct radeon_device *rdev, ni_ls_clockgating_enable() argument
985 btc_program_mgcg_hw_sequence(rdev, ps, count); ni_ls_clockgating_enable()
989 static int ni_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev, ni_patch_single_dependency_table_based_on_leakage() argument
992 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_patch_single_dependency_table_based_on_leakage()
1007 static int ni_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev) ni_patch_dependency_tables_based_on_leakage() argument
1011 ret = ni_patch_single_dependency_table_based_on_leakage(rdev, ni_patch_dependency_tables_based_on_leakage()
1012 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk); ni_patch_dependency_tables_based_on_leakage()
1014 ret = ni_patch_single_dependency_table_based_on_leakage(rdev, ni_patch_dependency_tables_based_on_leakage()
1015 &rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk); ni_patch_dependency_tables_based_on_leakage()
1019 static void ni_stop_dpm(struct radeon_device *rdev) ni_stop_dpm() argument
1025 static int ni_notify_hw_of_power_source(struct radeon_device *rdev,
1029 return (rv770_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
1036 static PPSMC_Result ni_send_msg_to_smc_with_parameter(struct radeon_device *rdev, ni_send_msg_to_smc_with_parameter() argument
1040 return rv770_send_msg_to_smc(rdev, msg); ni_send_msg_to_smc_with_parameter()
1043 static int ni_restrict_performance_levels_before_switch(struct radeon_device *rdev) ni_restrict_performance_levels_before_switch() argument
1045 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK) ni_restrict_performance_levels_before_switch()
1048 return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ? ni_restrict_performance_levels_before_switch()
1052 int ni_dpm_force_performance_level(struct radeon_device *rdev, ni_dpm_force_performance_level() argument
1056 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK) ni_dpm_force_performance_level()
1059 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 1) != PPSMC_Result_OK) ni_dpm_force_performance_level()
1062 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK) ni_dpm_force_performance_level()
1065 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) != PPSMC_Result_OK) ni_dpm_force_performance_level()
1068 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK) ni_dpm_force_performance_level()
1071 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK) ni_dpm_force_performance_level()
1075 rdev->pm.dpm.forced_level = level; ni_dpm_force_performance_level()
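
Every DPM step in this file funnels through the same SMC mailbox idiom, and on these parts ni_send_msg_to_smc_with_parameter() simply forwards to rv770_send_msg_to_smc() (line 1040 above). Each call therefore reduces to a sketch like the following; the -EINVAL on failure is illustrative, since the listing elides the actual return values:

        if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) !=
            PPSMC_Result_OK)
                return -EINVAL;  /* SMC rejected or timed out on the command */
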
1080 static void ni_stop_smc(struct radeon_device *rdev) ni_stop_smc() argument
1085 for (i = 0; i < rdev->usec_timeout; i++) { ni_stop_smc()
1094 r7xx_stop_smc(rdev); ni_stop_smc()
1097 static int ni_process_firmware_header(struct radeon_device *rdev) ni_process_firmware_header() argument
1099 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_process_firmware_header()
1100 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_process_firmware_header()
1101 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_process_firmware_header()
1105 ret = rv770_read_smc_sram_dword(rdev, ni_process_firmware_header()
1115 ret = rv770_read_smc_sram_dword(rdev, ni_process_firmware_header()
1125 ret = rv770_read_smc_sram_dword(rdev, ni_process_firmware_header()
1135 ret = rv770_read_smc_sram_dword(rdev, ni_process_firmware_header()
1145 ret = rv770_read_smc_sram_dword(rdev, ni_process_firmware_header()
1155 ret = rv770_read_smc_sram_dword(rdev, ni_process_firmware_header()
1165 ret = rv770_read_smc_sram_dword(rdev, ni_process_firmware_header()
1179 static void ni_read_clock_registers(struct radeon_device *rdev) ni_read_clock_registers() argument
1181 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_read_clock_registers()
1200 static int ni_enter_ulp_state(struct radeon_device *rdev)
1202 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1220 static void ni_program_response_times(struct radeon_device *rdev) ni_program_response_times() argument
1226 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mvdd_chg_time, 1); ni_program_response_times()
1228 voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time; ni_program_response_times()
1229 backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time; ni_program_response_times()
1240 reference_clock = radeon_get_xclk(rdev); ni_program_response_times()
1249 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_vreg, vddc_dly); ni_program_response_times()
1250 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_bbias, bb_dly); ni_program_response_times()
1251 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_acpi, acpi_dly); ni_program_response_times()
1252 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly); ni_program_response_times()
1253 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA); ni_program_response_times()
1254 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_switch_lim, mclk_switch_limit); ni_program_response_times()
1257 static void ni_populate_smc_voltage_table(struct radeon_device *rdev, ni_populate_smc_voltage_table() argument
1269 static void ni_populate_smc_voltage_tables(struct radeon_device *rdev, ni_populate_smc_voltage_tables() argument
1272 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_populate_smc_voltage_tables()
1273 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_populate_smc_voltage_tables()
1277 ni_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table); ni_populate_smc_voltage_tables()
1291 ni_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table); ni_populate_smc_voltage_tables()
1299 static int ni_populate_voltage_value(struct radeon_device *rdev, ni_populate_voltage_value() argument
1320 static void ni_populate_mvdd_value(struct radeon_device *rdev, ni_populate_mvdd_value() argument
1324 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_populate_mvdd_value()
1325 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_populate_mvdd_value()
1342 static int ni_get_std_voltage_value(struct radeon_device *rdev, ni_get_std_voltage_value() argument
1346 if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries && ni_get_std_voltage_value()
1347 ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count)) ni_get_std_voltage_value()
1348 *std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc; ni_get_std_voltage_value()
1355 static void ni_populate_std_voltage_value(struct radeon_device *rdev, ni_populate_std_voltage_value() argument
1363 static u32 ni_get_smc_power_scaling_factor(struct radeon_device *rdev) ni_get_smc_power_scaling_factor() argument
1366 u32 xclk = radeon_get_xclk(rdev); ni_get_smc_power_scaling_factor()
1380 static u32 ni_calculate_power_boost_limit(struct radeon_device *rdev, ni_calculate_power_boost_limit() argument
1385 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_calculate_power_boost_limit()
1386 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_calculate_power_boost_limit()
1400 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table, ni_calculate_power_boost_limit()
1406 ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_med); ni_calculate_power_boost_limit()
1410 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table, ni_calculate_power_boost_limit()
1416 ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_high); ni_calculate_power_boost_limit()
1432 static int ni_calculate_adjusted_tdp_limits(struct radeon_device *rdev, ni_calculate_adjusted_tdp_limits() argument
1438 if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit) ni_calculate_adjusted_tdp_limits()
1442 *tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100; ni_calculate_adjusted_tdp_limits()
1443 *near_tdp_limit = rdev->pm.dpm.near_tdp_limit + (*tdp_limit - rdev->pm.dpm.tdp_limit); ni_calculate_adjusted_tdp_limits()
1445 *tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100; ni_calculate_adjusted_tdp_limits()
1446 *near_tdp_limit = rdev->pm.dpm.near_tdp_limit - (rdev->pm.dpm.tdp_limit - *tdp_limit); ni_calculate_adjusted_tdp_limits()
1452 static int ni_populate_smc_tdp_limits(struct radeon_device *rdev, ni_populate_smc_tdp_limits() argument
1455 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_populate_smc_tdp_limits()
1456 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_populate_smc_tdp_limits()
1460 u32 scaling_factor = ni_get_smc_power_scaling_factor(rdev); ni_populate_smc_tdp_limits()
1471 ret = ni_calculate_adjusted_tdp_limits(rdev, ni_populate_smc_tdp_limits()
1473 rdev->pm.dpm.tdp_adjustment, ni_populate_smc_tdp_limits()
1479 power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, ni_populate_smc_tdp_limits()
1492 ret = rv770_copy_bytes_to_smc(rdev, ni_populate_smc_tdp_limits()
1504 int ni_copy_and_switch_arb_sets(struct radeon_device *rdev, ni_copy_and_switch_arb_sets() argument
1569 static int ni_init_arb_table_index(struct radeon_device *rdev) ni_init_arb_table_index() argument
1571 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_init_arb_table_index()
1572 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_init_arb_table_index()
1576 ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start, ni_init_arb_table_index()
1584 return rv770_write_smc_sram_dword(rdev, ni_pi->arb_table_start, ni_init_arb_table_index()
1588 static int ni_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev) ni_initial_switch_from_arb_f0_to_f1() argument
1590 return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1); ni_initial_switch_from_arb_f0_to_f1()
1593 static int ni_force_switch_to_arb_f0(struct radeon_device *rdev) ni_force_switch_to_arb_f0() argument
1595 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_force_switch_to_arb_f0()
1596 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_force_switch_to_arb_f0()
1600 ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start, ni_force_switch_to_arb_f0()
1610 return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0); ni_force_switch_to_arb_f0()
1613 static int ni_populate_memory_timing_parameters(struct radeon_device *rdev, ni_populate_memory_timing_parameters() argument
1621 (u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk); ni_populate_memory_timing_parameters()
1624 radeon_atom_set_engine_dram_timings(rdev, ni_populate_memory_timing_parameters()
1637 static int ni_do_program_memory_timing_parameters(struct radeon_device *rdev, ni_do_program_memory_timing_parameters() argument
1641 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_do_program_memory_timing_parameters()
1642 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_do_program_memory_timing_parameters()
1648 ret = ni_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs); ni_do_program_memory_timing_parameters()
1652 ret = rv770_copy_bytes_to_smc(rdev, ni_do_program_memory_timing_parameters()
1665 static int ni_program_memory_timing_parameters(struct radeon_device *rdev, ni_program_memory_timing_parameters() argument
1668 return ni_do_program_memory_timing_parameters(rdev, radeon_new_state, ni_program_memory_timing_parameters()
1672 static void ni_populate_initial_mvdd_value(struct radeon_device *rdev, ni_populate_initial_mvdd_value() argument
1675 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_populate_initial_mvdd_value()
1681 static int ni_populate_smc_initial_state(struct radeon_device *rdev, ni_populate_smc_initial_state() argument
1686 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_populate_smc_initial_state()
1687 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_populate_smc_initial_state()
1688 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_populate_smc_initial_state()
1730 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table, ni_populate_smc_initial_state()
1736 ret = ni_get_std_voltage_value(rdev, ni_populate_smc_initial_state()
1740 ni_populate_std_voltage_value(rdev, std_vddc, ni_populate_smc_initial_state()
1746 ni_populate_voltage_value(rdev, ni_populate_smc_initial_state()
1751 ni_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd); ni_populate_smc_initial_state()
1765 cypress_get_strobe_mode_settings(rdev, ni_populate_smc_initial_state()
1792 static int ni_populate_smc_acpi_state(struct radeon_device *rdev, ni_populate_smc_acpi_state() argument
1795 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_populate_smc_acpi_state()
1796 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_populate_smc_acpi_state()
1797 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_populate_smc_acpi_state()
1816 ret = ni_populate_voltage_value(rdev, ni_populate_smc_acpi_state()
1822 ret = ni_get_std_voltage_value(rdev, ni_populate_smc_acpi_state()
1825 ni_populate_std_voltage_value(rdev, std_vddc, ni_populate_smc_acpi_state()
1839 ret = ni_populate_voltage_value(rdev, ni_populate_smc_acpi_state()
1846 ret = ni_get_std_voltage_value(rdev, ni_populate_smc_acpi_state()
1850 ni_populate_std_voltage_value(rdev, std_vddc, ni_populate_smc_acpi_state()
1859 ni_populate_voltage_value(rdev, ni_populate_smc_acpi_state()
1921 ni_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd); ni_populate_smc_acpi_state()
1940 static int ni_init_smc_table(struct radeon_device *rdev) ni_init_smc_table() argument
1942 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_init_smc_table()
1943 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_init_smc_table()
1945 struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps; ni_init_smc_table()
1950 ni_populate_smc_voltage_tables(rdev, table); ni_init_smc_table()
1952 switch (rdev->pm.int_thermal_type) { ni_init_smc_table()
1965 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC) ni_init_smc_table()
1968 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT) ni_init_smc_table()
1971 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC) ni_init_smc_table()
1977 ret = ni_populate_smc_initial_state(rdev, radeon_boot_state, table); ni_init_smc_table()
1981 ret = ni_populate_smc_acpi_state(rdev, table); ni_init_smc_table()
1989 ret = ni_do_program_memory_timing_parameters(rdev, radeon_boot_state, ni_init_smc_table()
1994 return rv770_copy_bytes_to_smc(rdev, pi->state_table_start, (u8 *)table, ni_init_smc_table()
1998 static int ni_calculate_sclk_params(struct radeon_device *rdev, ni_calculate_sclk_params() argument
2002 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_calculate_sclk_params()
2003 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_calculate_sclk_params()
2012 u32 reference_clock = rdev->clock.spll.reference_freq; ni_calculate_sclk_params()
2017 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, ni_calculate_sclk_params()
2044 if (radeon_atombios_get_asic_ss_info(rdev, &ss, ni_calculate_sclk_params()
2069 static int ni_populate_sclk_value(struct radeon_device *rdev, ni_populate_sclk_value() argument
2076 ret = ni_calculate_sclk_params(rdev, engine_clock, &sclk_tmp); ni_populate_sclk_value()
2090 static int ni_init_smc_spll_table(struct radeon_device *rdev) ni_init_smc_spll_table() argument
2092 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_init_smc_spll_table()
2093 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_init_smc_spll_table()
2112 ret = ni_calculate_sclk_params(rdev, sclk, &sclk_params); ni_init_smc_spll_table()
2152 ret = rv770_copy_bytes_to_smc(rdev, ni_pi->spll_table_start, (u8 *)spll_table, ni_init_smc_spll_table()
2160 static int ni_populate_mclk_value(struct radeon_device *rdev, ni_populate_mclk_value() argument
2167 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_populate_mclk_value()
2168 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_populate_mclk_value()
2183 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM, ni_populate_mclk_value()
2195 ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div); ni_populate_mclk_value()
2240 if (radeon_atombios_get_asic_ss_info(rdev, &ss, ni_populate_mclk_value()
2242 u32 reference_clock = rdev->clock.mpll.reference_freq; ni_populate_mclk_value()
2294 static void ni_populate_smc_sp(struct radeon_device *rdev, ni_populate_smc_sp() argument
2299 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_populate_smc_sp()
2309 static int ni_convert_power_level_to_smc(struct radeon_device *rdev, ni_convert_power_level_to_smc() argument
2313 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_convert_power_level_to_smc()
2314 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_convert_power_level_to_smc()
2315 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_convert_power_level_to_smc()
2324 ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk); ni_convert_power_level_to_smc()
2342 level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk); ni_convert_power_level_to_smc()
2345 if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >= ni_convert_power_level_to_smc()
2356 ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, ni_convert_power_level_to_smc()
2361 ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, &level->mclk, 1, 1); ni_convert_power_level_to_smc()
2366 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table, ni_convert_power_level_to_smc()
2371 ret = ni_get_std_voltage_value(rdev, &level->vddc, &std_vddc); ni_convert_power_level_to_smc()
2375 ni_populate_std_voltage_value(rdev, std_vddc, ni_convert_power_level_to_smc()
2379 ret = ni_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table, ni_convert_power_level_to_smc()
2385 ni_populate_mvdd_value(rdev, pl->mclk, &level->mvdd); ni_convert_power_level_to_smc()
2390 static int ni_populate_smc_t(struct radeon_device *rdev, ni_populate_smc_t() argument
2394 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_populate_smc_t()
2395 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_populate_smc_t()
2450 static int ni_populate_power_containment_values(struct radeon_device *rdev, ni_populate_power_containment_values() argument
2454 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_populate_power_containment_values()
2455 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_populate_power_containment_values()
2456 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_populate_power_containment_values()
2476 ret = ni_calculate_adjusted_tdp_limits(rdev, ni_populate_power_containment_values()
2478 rdev->pm.dpm.tdp_adjustment, ni_populate_power_containment_values()
2484 power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, near_tdp_limit); ni_populate_power_containment_values()
2486 ret = rv770_write_smc_sram_dword(rdev, ni_populate_power_containment_values()
2490 ni_scale_power_for_smc(power_boost_limit, ni_get_smc_power_scaling_factor(rdev)), ni_populate_power_containment_values()
2536 static int ni_populate_sq_ramping_values(struct radeon_device *rdev, ni_populate_sq_ramping_values() argument
2540 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_populate_sq_ramping_values()
2553 if (rdev->pm.dpm.sq_ramping_threshold == 0) ni_populate_sq_ramping_values()
2575 if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) && ni_populate_sq_ramping_values()
2594 static int ni_enable_power_containment(struct radeon_device *rdev, ni_enable_power_containment() argument
2598 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_enable_power_containment()
2605 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingActive); ni_enable_power_containment()
2614 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive); ni_enable_power_containment()
2624 static int ni_convert_power_state_to_smc(struct radeon_device *rdev, ni_convert_power_state_to_smc() argument
2628 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_convert_power_state_to_smc()
2629 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_convert_power_state_to_smc()
2643 ret = ni_convert_power_level_to_smc(rdev, &state->performance_levels[i], ni_convert_power_state_to_smc()
2667 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_watermark_threshold, ni_convert_power_state_to_smc()
2670 ni_populate_smc_sp(rdev, radeon_state, smc_state); ni_convert_power_state_to_smc()
2672 ret = ni_populate_power_containment_values(rdev, radeon_state, smc_state); ni_convert_power_state_to_smc()
2676 ret = ni_populate_sq_ramping_values(rdev, radeon_state, smc_state); ni_convert_power_state_to_smc()
2680 return ni_populate_smc_t(rdev, radeon_state, smc_state); ni_convert_power_state_to_smc()
2683 static int ni_upload_sw_state(struct radeon_device *rdev, ni_upload_sw_state() argument
2686 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_upload_sw_state()
2697 ret = ni_convert_power_state_to_smc(rdev, radeon_new_state, smc_state); ni_upload_sw_state()
2701 ret = rv770_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, state_size, pi->sram_end); ni_upload_sw_state()
2709 static int ni_set_mc_special_registers(struct radeon_device *rdev, ni_set_mc_special_registers() argument
2712 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_set_mc_special_registers()
2871 static int ni_initialize_mc_reg_table(struct radeon_device *rdev) ni_initialize_mc_reg_table() argument
2873 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_initialize_mc_reg_table()
2877 u8 module_index = rv770_get_memory_module_index(rdev); ni_initialize_mc_reg_table()
2897 ret = radeon_atom_init_mc_reg_table(rdev, module_index, table); ni_initialize_mc_reg_table()
2909 ret = ni_set_mc_special_registers(rdev, ni_table); ni_initialize_mc_reg_table()
2922 static void ni_populate_mc_reg_addresses(struct radeon_device *rdev, ni_populate_mc_reg_addresses() argument
2925 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_populate_mc_reg_addresses()
2957 static void ni_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev, ni_convert_mc_reg_table_entry_to_smc() argument
2961 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_convert_mc_reg_table_entry_to_smc()
2978 static void ni_convert_mc_reg_table_to_smc(struct radeon_device *rdev, ni_convert_mc_reg_table_to_smc() argument
2986 ni_convert_mc_reg_table_entry_to_smc(rdev, ni_convert_mc_reg_table_to_smc()
2992 static int ni_populate_mc_reg_table(struct radeon_device *rdev, ni_populate_mc_reg_table() argument
2995 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_populate_mc_reg_table()
2996 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_populate_mc_reg_table()
2997 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_populate_mc_reg_table()
3003 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_seq_index, 1); ni_populate_mc_reg_table()
3005 ni_populate_mc_reg_addresses(rdev, mc_reg_table); ni_populate_mc_reg_table()
3007 ni_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0], ni_populate_mc_reg_table()
3015 ni_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, mc_reg_table); ni_populate_mc_reg_table()
3017 return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start, ni_populate_mc_reg_table()
3023 static int ni_upload_mc_reg_table(struct radeon_device *rdev, ni_upload_mc_reg_table() argument
3026 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_upload_mc_reg_table()
3027 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_upload_mc_reg_table()
3028 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_upload_mc_reg_table()
3035 ni_convert_mc_reg_table_to_smc(rdev, radeon_new_state, mc_reg_table); ni_upload_mc_reg_table()
3040 return rv770_copy_bytes_to_smc(rdev, address, ni_upload_mc_reg_table()
3046 static int ni_init_driver_calculated_leakage_table(struct radeon_device *rdev, ni_init_driver_calculated_leakage_table() argument
3049 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_init_driver_calculated_leakage_table()
3050 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_init_driver_calculated_leakage_table()
3062 scaling_factor = ni_get_smc_power_scaling_factor(rdev); ni_init_driver_calculated_leakage_table()
3071 ni_calculate_leakage_for_v_and_t(rdev, ni_init_driver_calculated_leakage_table()
3093 static int ni_init_simplified_leakage_table(struct radeon_device *rdev, ni_init_simplified_leakage_table() argument
3096 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_init_simplified_leakage_table()
3098 &rdev->pm.dpm.dyn_state.cac_leakage_table; ni_init_simplified_leakage_table()
3118 scaling_factor = ni_get_smc_power_scaling_factor(rdev); ni_init_simplified_leakage_table()
3139 static int ni_initialize_smc_cac_tables(struct radeon_device *rdev) ni_initialize_smc_cac_tables() argument
3141 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_initialize_smc_cac_tables()
3142 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_initialize_smc_cac_tables()
3165 ni_pi->cac_data.i_leakage = rdev->pm.dpm.cac_leakage; ni_initialize_smc_cac_tables()
3177 ret = ni_init_driver_calculated_leakage_table(rdev, cac_tables); ni_initialize_smc_cac_tables()
3179 ret = ni_init_simplified_leakage_table(rdev, cac_tables); ni_initialize_smc_cac_tables()
3194 ret = rv770_copy_bytes_to_smc(rdev, ni_pi->cac_table_start, (u8 *)cac_tables, ni_initialize_smc_cac_tables()
3208 static int ni_initialize_hardware_cac_manager(struct radeon_device *rdev) ni_initialize_hardware_cac_manager() argument
3210 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_initialize_hardware_cac_manager()
3377 static int ni_enable_smc_cac(struct radeon_device *rdev, ni_enable_smc_cac() argument
3381 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_enable_smc_cac()
3388 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_CollectCAC_PowerCorreln); ni_enable_smc_cac()
3391 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable); ni_enable_smc_cac()
3396 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac); ni_enable_smc_cac()
3403 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac); ni_enable_smc_cac()
3408 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable); ni_enable_smc_cac()
3418 static int ni_pcie_performance_request(struct radeon_device *rdev, ni_pcie_performance_request() argument
3422 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_pcie_performance_request()
3427 radeon_acpi_pcie_notify_device_ready(rdev); ni_pcie_performance_request()
3429 return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise); ni_pcie_performance_request()
3433 return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise); ni_pcie_performance_request()
3439 static int ni_advertise_gen2_capability(struct radeon_device *rdev) ni_advertise_gen2_capability() argument
3441 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_advertise_gen2_capability()
3453 ni_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true); ni_advertise_gen2_capability()
3458 static void ni_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev, ni_enable_bif_dynamic_pcie_gen2() argument
3461 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_enable_bif_dynamic_pcie_gen2()
3497 static void ni_enable_dynamic_pcie_gen2(struct radeon_device *rdev, ni_enable_dynamic_pcie_gen2() argument
3500 ni_enable_bif_dynamic_pcie_gen2(rdev, enable); ni_enable_dynamic_pcie_gen2()
3508 void ni_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev, ni_set_uvd_clock_before_set_eng_clock() argument
3523 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk); ni_set_uvd_clock_before_set_eng_clock()
3526 void ni_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev, ni_set_uvd_clock_after_set_eng_clock() argument
3541 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk); ni_set_uvd_clock_after_set_eng_clock()
3544 void ni_dpm_setup_asic(struct radeon_device *rdev) ni_dpm_setup_asic() argument
3546 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_dpm_setup_asic()
3549 r = ni_mc_load_microcode(rdev); ni_dpm_setup_asic()
3552 ni_read_clock_registers(rdev); ni_dpm_setup_asic()
3553 btc_read_arb_registers(rdev); ni_dpm_setup_asic()
3554 rv770_get_memory_type(rdev); ni_dpm_setup_asic()
3556 ni_advertise_gen2_capability(rdev); ni_dpm_setup_asic()
3557 rv770_get_pcie_gen2_status(rdev); ni_dpm_setup_asic()
3558 rv770_enable_acpi_pm(rdev); ni_dpm_setup_asic()
3561 void ni_update_current_ps(struct radeon_device *rdev, ni_update_current_ps() argument
3565 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_update_current_ps()
3566 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_update_current_ps()
3573 void ni_update_requested_ps(struct radeon_device *rdev, ni_update_requested_ps() argument
3577 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_update_requested_ps()
3578 struct ni_power_info *ni_pi = ni_get_pi(rdev); ni_update_requested_ps()
3585 int ni_dpm_enable(struct radeon_device *rdev) ni_dpm_enable() argument
3587 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_dpm_enable()
3588 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_dpm_enable()
3589 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps; ni_dpm_enable()
3593 ni_cg_clockgating_default(rdev); ni_dpm_enable()
3594 if (btc_dpm_enabled(rdev)) ni_dpm_enable()
3597 ni_mg_clockgating_default(rdev); ni_dpm_enable()
3599 ni_ls_clockgating_default(rdev); ni_dpm_enable()
3601 rv770_enable_voltage_control(rdev, true); ni_dpm_enable()
3602 ret = cypress_construct_voltage_tables(rdev); ni_dpm_enable()
3609 ret = ni_initialize_mc_reg_table(rdev); ni_dpm_enable()
3614 cypress_enable_spread_spectrum(rdev, true); ni_dpm_enable()
3616 rv770_enable_thermal_protection(rdev, true); ni_dpm_enable()
3617 rv770_setup_bsp(rdev); ni_dpm_enable()
3618 rv770_program_git(rdev); ni_dpm_enable()
3619 rv770_program_tp(rdev); ni_dpm_enable()
3620 rv770_program_tpp(rdev); ni_dpm_enable()
3621 rv770_program_sstp(rdev); ni_dpm_enable()
3622 cypress_enable_display_gap(rdev); ni_dpm_enable()
3623 rv770_program_vc(rdev); ni_dpm_enable()
3625 ni_enable_dynamic_pcie_gen2(rdev, true); ni_dpm_enable()
3626 ret = rv770_upload_firmware(rdev); ni_dpm_enable()
3631 ret = ni_process_firmware_header(rdev); ni_dpm_enable()
3636 ret = ni_initial_switch_from_arb_f0_to_f1(rdev); ni_dpm_enable()
3641 ret = ni_init_smc_table(rdev); ni_dpm_enable()
3646 ret = ni_init_smc_spll_table(rdev); ni_dpm_enable()
3651 ret = ni_init_arb_table_index(rdev); ni_dpm_enable()
3657 ret = ni_populate_mc_reg_table(rdev, boot_ps); ni_dpm_enable()
3663 ret = ni_initialize_smc_cac_tables(rdev); ni_dpm_enable()
3668 ret = ni_initialize_hardware_cac_manager(rdev); ni_dpm_enable()
3673 ret = ni_populate_smc_tdp_limits(rdev, boot_ps); ni_dpm_enable()
3678 ni_program_response_times(rdev); ni_dpm_enable()
3679 r7xx_start_smc(rdev); ni_dpm_enable()
3680 ret = cypress_notify_smc_display_change(rdev, false); ni_dpm_enable()
3685 cypress_enable_sclk_control(rdev, true); ni_dpm_enable()
3687 cypress_enable_mclk_control(rdev, true); ni_dpm_enable()
3688 cypress_start_dpm(rdev); ni_dpm_enable()
3690 ni_gfx_clockgating_enable(rdev, true); ni_dpm_enable()
3692 ni_mg_clockgating_enable(rdev, true); ni_dpm_enable()
3694 ni_ls_clockgating_enable(rdev, true); ni_dpm_enable()
3696 rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true); ni_dpm_enable()
3698 ni_update_current_ps(rdev, boot_ps); ni_dpm_enable()
3703 void ni_dpm_disable(struct radeon_device *rdev) ni_dpm_disable() argument
3705 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_dpm_disable()
3706 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_dpm_disable()
3707 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps; ni_dpm_disable()
3709 if (!btc_dpm_enabled(rdev)) ni_dpm_disable()
3711 rv770_clear_vc(rdev); ni_dpm_disable()
3713 rv770_enable_thermal_protection(rdev, false); ni_dpm_disable()
3714 ni_enable_power_containment(rdev, boot_ps, false); ni_dpm_disable()
3715 ni_enable_smc_cac(rdev, boot_ps, false); ni_dpm_disable()
3716 cypress_enable_spread_spectrum(rdev, false); ni_dpm_disable()
3717 rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false); ni_dpm_disable()
3719 ni_enable_dynamic_pcie_gen2(rdev, false); ni_dpm_disable()
3721 if (rdev->irq.installed && ni_dpm_disable()
3722 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) { ni_dpm_disable()
3723 rdev->irq.dpm_thermal = false; ni_dpm_disable()
3724 radeon_irq_set(rdev); ni_dpm_disable()
3728 ni_gfx_clockgating_enable(rdev, false); ni_dpm_disable()
3730 ni_mg_clockgating_enable(rdev, false); ni_dpm_disable()
3732 ni_ls_clockgating_enable(rdev, false); ni_dpm_disable()
3733 ni_stop_dpm(rdev); ni_dpm_disable()
3734 btc_reset_to_default(rdev); ni_dpm_disable()
3735 ni_stop_smc(rdev); ni_dpm_disable()
3736 ni_force_switch_to_arb_f0(rdev); ni_dpm_disable()
3738 ni_update_current_ps(rdev, boot_ps); ni_dpm_disable()
3741 static int ni_power_control_set_level(struct radeon_device *rdev) ni_power_control_set_level() argument
3743 struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps; ni_power_control_set_level()
3746 ret = ni_restrict_performance_levels_before_switch(rdev); ni_power_control_set_level()
3749 ret = rv770_halt_smc(rdev); ni_power_control_set_level()
3752 ret = ni_populate_smc_tdp_limits(rdev, new_ps); ni_power_control_set_level()
3755 ret = rv770_resume_smc(rdev); ni_power_control_set_level()
3758 ret = rv770_set_sw_state(rdev); ni_power_control_set_level()
3765 int ni_dpm_pre_set_power_state(struct radeon_device *rdev) ni_dpm_pre_set_power_state() argument
3767 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_dpm_pre_set_power_state()
3768 struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps; ni_dpm_pre_set_power_state()
3771 ni_update_requested_ps(rdev, new_ps); ni_dpm_pre_set_power_state()
3773 ni_apply_state_adjust_rules(rdev, &eg_pi->requested_rps); ni_dpm_pre_set_power_state()
3778 int ni_dpm_set_power_state(struct radeon_device *rdev) ni_dpm_set_power_state() argument
3780 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_dpm_set_power_state()
3785 ret = ni_restrict_performance_levels_before_switch(rdev); ni_dpm_set_power_state()
3790 ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps); ni_dpm_set_power_state()
3791 ret = ni_enable_power_containment(rdev, new_ps, false); ni_dpm_set_power_state()
3796 ret = ni_enable_smc_cac(rdev, new_ps, false); ni_dpm_set_power_state()
3801 ret = rv770_halt_smc(rdev); ni_dpm_set_power_state()
3807 btc_notify_uvd_to_smc(rdev, new_ps); ni_dpm_set_power_state()
3808 ret = ni_upload_sw_state(rdev, new_ps); ni_dpm_set_power_state()
3814 ret = ni_upload_mc_reg_table(rdev, new_ps); ni_dpm_set_power_state()
3820 ret = ni_program_memory_timing_parameters(rdev, new_ps); ni_dpm_set_power_state()
3825 ret = rv770_resume_smc(rdev); ni_dpm_set_power_state()
3830 ret = rv770_set_sw_state(rdev); ni_dpm_set_power_state()
3835 ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps); ni_dpm_set_power_state()
3836 ret = ni_enable_smc_cac(rdev, new_ps, true); ni_dpm_set_power_state()
3841 ret = ni_enable_power_containment(rdev, new_ps, true); ni_dpm_set_power_state()
3848 ret = ni_power_control_set_level(rdev); ni_dpm_set_power_state()
3857 void ni_dpm_post_set_power_state(struct radeon_device *rdev) ni_dpm_post_set_power_state() argument
3859 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_dpm_post_set_power_state()
3862 ni_update_current_ps(rdev, new_ps); ni_dpm_post_set_power_state()
3866 void ni_dpm_reset_asic(struct radeon_device *rdev)
3868 ni_restrict_performance_levels_before_switch(rdev);
3869 rv770_set_boot_state(rdev);
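
Taken together, ni_dpm_pre_set_power_state(), ni_dpm_set_power_state() and ni_dpm_post_set_power_state() above form a three-phase switch driven by the core radeon power-management code. A sketch of the calling order, assuming the caller holds whatever locking the core normally provides:

        int ret;

        ret = ni_dpm_pre_set_power_state(rdev);  /* adjust the requested state */
        if (ret)
                return ret;
        ret = ni_dpm_set_power_state(rdev);      /* halt SMC, upload, resume */
        if (ret)
                return ret;
        ni_dpm_post_set_power_state(rdev);       /* commit it as current_ps */
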
3894 static void ni_parse_pplib_non_clock_info(struct radeon_device *rdev, ni_parse_pplib_non_clock_info() argument
3915 rdev->pm.dpm.boot_ps = rps; ni_parse_pplib_non_clock_info()
3917 rdev->pm.dpm.uvd_ps = rps; ni_parse_pplib_non_clock_info()
3920 static void ni_parse_pplib_clock_info(struct radeon_device *rdev, ni_parse_pplib_clock_info() argument
3924 struct rv7xx_power_info *pi = rv770_get_pi(rdev); ni_parse_pplib_clock_info()
3925 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_parse_pplib_clock_info()
3969 radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd); ni_parse_pplib_clock_info()
3970 pl->mclk = rdev->clock.default_mclk; ni_parse_pplib_clock_info()
3971 pl->sclk = rdev->clock.default_sclk; ni_parse_pplib_clock_info()
3978 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk; ni_parse_pplib_clock_info()
3979 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk; ni_parse_pplib_clock_info()
3980 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc; ni_parse_pplib_clock_info()
3981 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci; ni_parse_pplib_clock_info()
3985 static int ni_parse_power_table(struct radeon_device *rdev) ni_parse_power_table() argument
3987 struct radeon_mode_info *mode_info = &rdev->mode_info; ni_parse_power_table()
4003 rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) * ni_parse_power_table()
4005 if (!rdev->pm.dpm.ps) ni_parse_power_table()
4022 kfree(rdev->pm.dpm.ps); ni_parse_power_table()
4025 rdev->pm.dpm.ps[i].ps_priv = ps; ni_parse_power_table()
4026 ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], ni_parse_power_table()
4035 ni_parse_pplib_clock_info(rdev, ni_parse_power_table()
4036 &rdev->pm.dpm.ps[i], j, ni_parse_power_table()
4041 rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates; ni_parse_power_table()
4045 int ni_dpm_init(struct radeon_device *rdev) ni_dpm_init() argument
4056 rdev->pm.dpm.priv = ni_pi; ni_dpm_init()
4060 rv770_get_max_vddc(rdev); ni_dpm_init()
4068 ret = r600_get_platform_caps(rdev); ni_dpm_init()
4072 ret = ni_parse_power_table(rdev); ni_dpm_init()
4075 ret = r600_parse_extended_power_table(rdev); ni_dpm_init()
4079 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries = ni_dpm_init()
4081 if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) { ni_dpm_init()
4082 r600_free_extended_power_table(rdev); ni_dpm_init()
4085 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4; ni_dpm_init()
4086 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0; ni_dpm_init()
4087 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0; ni_dpm_init()
4088 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000; ni_dpm_init()
4089 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720; ni_dpm_init()
4090 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000; ni_dpm_init()
4091 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810; ni_dpm_init()
4092 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000; ni_dpm_init()
4093 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900; ni_dpm_init()
4095 ni_patch_dependency_tables_based_on_leakage(rdev); ni_dpm_init()
4097 if (rdev->pm.dpm.voltage_response_time == 0) ni_dpm_init()
4098 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT; ni_dpm_init()
4099 if (rdev->pm.dpm.backbias_response_time == 0) ni_dpm_init()
4100 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT; ni_dpm_init()
4102 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, ni_dpm_init()
4126 if (rdev->pdev->device == 0x6707) { ni_dpm_init()
4138 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0); ni_dpm_init()
4141 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0); ni_dpm_init()
4144 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0); ni_dpm_init()
4146 rv770_get_engine_memory_ss(rdev); ni_dpm_init()
4163 if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE) ni_dpm_init()
4181 radeon_acpi_is_pcie_performance_request_supported(rdev); ni_dpm_init()
4194 rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 3; ni_dpm_init()
4195 rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200; ni_dpm_init()
4196 rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900; ni_dpm_init()
4197 rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk); ni_dpm_init()
4198 rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk; ni_dpm_init()
4199 rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0; ni_dpm_init()
4200 rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL; ni_dpm_init()
4201 rdev->pm.dpm.dyn_state.sclk_mclk_delta = 12500; ni_dpm_init()
4208 switch (rdev->pdev->device) { ni_dpm_init()
4258 if ((rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk == 0) || ni_dpm_init()
4259 (rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk == 0)) ni_dpm_init()
4260 rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc = ni_dpm_init()
4261 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac; ni_dpm_init()
4266 void ni_dpm_fini(struct radeon_device *rdev) ni_dpm_fini() argument
4270 for (i = 0; i < rdev->pm.dpm.num_ps; i++) { ni_dpm_fini()
4271 kfree(rdev->pm.dpm.ps[i].ps_priv); ni_dpm_fini()
4273 kfree(rdev->pm.dpm.ps); ni_dpm_fini()
4274 kfree(rdev->pm.dpm.priv); ni_dpm_fini()
4275 kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries); ni_dpm_fini()
4276 r600_free_extended_power_table(rdev); ni_dpm_fini()
4279 void ni_dpm_print_power_state(struct radeon_device *rdev, ni_dpm_print_power_state() argument
4291 if (rdev->family >= CHIP_TAHITI) ni_dpm_print_power_state()
4298 r600_dpm_print_ps_status(rdev, rps); ni_dpm_print_power_state()
4301 void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev, ni_dpm_debugfs_print_current_performance_level() argument
4304 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_dpm_debugfs_print_current_performance_level()
4322 u32 ni_dpm_get_current_sclk(struct radeon_device *rdev) ni_dpm_get_current_sclk() argument
4324 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_dpm_get_current_sclk()
4340 u32 ni_dpm_get_current_mclk(struct radeon_device *rdev) ni_dpm_get_current_mclk() argument
4342 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_dpm_get_current_mclk()
4358 u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low) ni_dpm_get_sclk() argument
4360 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_dpm_get_sclk()
4369 u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low) ni_dpm_get_mclk() argument
4371 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); ni_dpm_get_mclk()
H A D btc_dpm.c
51 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
52 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
54 extern int ni_mc_load_microcode(struct radeon_device *rdev);
1228 static u32 btc_get_valid_mclk(struct radeon_device *rdev, btc_get_valid_mclk() argument
1231 return btc_find_valid_clock(&rdev->pm.dpm.dyn_state.valid_mclk_values, btc_get_valid_mclk()
1235 static u32 btc_get_valid_sclk(struct radeon_device *rdev, btc_get_valid_sclk() argument
1238 return btc_find_valid_clock(&rdev->pm.dpm.dyn_state.valid_sclk_values, btc_get_valid_sclk()
1242 void btc_skip_blacklist_clocks(struct radeon_device *rdev, btc_skip_blacklist_clocks() argument
1261 *sclk = btc_get_valid_sclk(rdev, max_sclk, *sclk + 1); btc_skip_blacklist_clocks()
1264 btc_skip_blacklist_clocks(rdev, max_sclk, max_mclk, sclk, mclk); btc_skip_blacklist_clocks()
1269 void btc_adjust_clock_combinations(struct radeon_device *rdev, btc_adjust_clock_combinations() argument
1281 if (((pl->mclk + (pl->sclk - 1)) / pl->sclk) > rdev->pm.dpm.dyn_state.mclk_sclk_ratio) btc_adjust_clock_combinations()
1282 pl->sclk = btc_get_valid_sclk(rdev, btc_adjust_clock_combinations()
1285 (rdev->pm.dpm.dyn_state.mclk_sclk_ratio - 1)) / btc_adjust_clock_combinations()
1286 rdev->pm.dpm.dyn_state.mclk_sclk_ratio); btc_adjust_clock_combinations()
1288 if ((pl->sclk - pl->mclk) > rdev->pm.dpm.dyn_state.sclk_mclk_delta) btc_adjust_clock_combinations()
1289 pl->mclk = btc_get_valid_mclk(rdev, btc_adjust_clock_combinations()
1292 rdev->pm.dpm.dyn_state.sclk_mclk_delta); btc_adjust_clock_combinations()
1308 void btc_apply_voltage_delta_rules(struct radeon_device *rdev, btc_apply_voltage_delta_rules() argument
1312 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_apply_voltage_delta_rules()
1319 if ((*vddc - *vddci) > rdev->pm.dpm.dyn_state.vddc_vddci_delta) { btc_apply_voltage_delta_rules()
1321 (*vddc - rdev->pm.dpm.dyn_state.vddc_vddci_delta)); btc_apply_voltage_delta_rules()
1325 if ((*vddci - *vddc) > rdev->pm.dpm.dyn_state.vddc_vddci_delta) { btc_apply_voltage_delta_rules()
1327 (*vddci - rdev->pm.dpm.dyn_state.vddc_vddci_delta)); btc_apply_voltage_delta_rules()
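
A worked example of the clamps above, using the defaults ni_dpm_init() programs earlier in this listing (mclk_sclk_ratio = 3, sclk_mclk_delta = 12500, vddc_vddci_delta = 200); the starting values are hypothetical and in the driver's 10 kHz clock units:

        /* clock rule: sclk = 20000 with mclk = 80000 is too memory-heavy */
        u32 ratio = DIV_ROUND_UP(80000, 20000); /* 4 > mclk_sclk_ratio (3) */
        u32 sclk  = DIV_ROUND_UP(80000, 3);     /* raised to 26667, then
                                                 * snapped by btc_get_valid_sclk() */

        /* voltage rule: vddc = 1100 mV, vddci = 850 mV gives a delta of
         * 250 > 200, so vddci is raised to 1100 - 200 = 900 (and then
         * snapped to a supported table entry). */
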
1333 static void btc_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev, btc_enable_bif_dynamic_pcie_gen2() argument
1336 struct rv7xx_power_info *pi = rv770_get_pi(rdev); btc_enable_bif_dynamic_pcie_gen2()
1375 static void btc_enable_dynamic_pcie_gen2(struct radeon_device *rdev, btc_enable_dynamic_pcie_gen2() argument
1378 btc_enable_bif_dynamic_pcie_gen2(rdev, enable); btc_enable_dynamic_pcie_gen2()
1386 static int btc_disable_ulv(struct radeon_device *rdev) btc_disable_ulv() argument
1388 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_disable_ulv()
1391 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableULV) != PPSMC_Result_OK) btc_disable_ulv()
1397 static int btc_populate_ulv_state(struct radeon_device *rdev, btc_populate_ulv_state() argument
1401 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_populate_ulv_state()
1405 ret = cypress_convert_power_level_to_smc(rdev, btc_populate_ulv_state()
1426 static int btc_populate_smc_acpi_state(struct radeon_device *rdev, btc_populate_smc_acpi_state() argument
1429 int ret = cypress_populate_smc_acpi_state(rdev, table); btc_populate_smc_acpi_state()
1440 void btc_program_mgcg_hw_sequence(struct radeon_device *rdev, btc_program_mgcg_hw_sequence() argument
1454 static void btc_cg_clock_gating_default(struct radeon_device *rdev) btc_cg_clock_gating_default() argument
1459 if (rdev->family == CHIP_BARTS) { btc_cg_clock_gating_default()
1462 } else if (rdev->family == CHIP_TURKS) { btc_cg_clock_gating_default()
1465 } else if (rdev->family == CHIP_CAICOS) { btc_cg_clock_gating_default()
1471 btc_program_mgcg_hw_sequence(rdev, p, count); btc_cg_clock_gating_default()
1474 static void btc_cg_clock_gating_enable(struct radeon_device *rdev, btc_cg_clock_gating_enable() argument
1481 if (rdev->family == CHIP_BARTS) { btc_cg_clock_gating_enable()
1484 } else if (rdev->family == CHIP_TURKS) { btc_cg_clock_gating_enable()
1487 } else if (rdev->family == CHIP_CAICOS) { btc_cg_clock_gating_enable()
1493 if (rdev->family == CHIP_BARTS) { btc_cg_clock_gating_enable()
1496 } else if (rdev->family == CHIP_TURKS) { btc_cg_clock_gating_enable()
1499 } else if (rdev->family == CHIP_CAICOS) { btc_cg_clock_gating_enable()
1506 btc_program_mgcg_hw_sequence(rdev, p, count); btc_cg_clock_gating_enable()
1509 static void btc_mg_clock_gating_default(struct radeon_device *rdev) btc_mg_clock_gating_default() argument
1514 if (rdev->family == CHIP_BARTS) { btc_mg_clock_gating_default()
1517 } else if (rdev->family == CHIP_TURKS) { btc_mg_clock_gating_default()
1520 } else if (rdev->family == CHIP_CAICOS) { btc_mg_clock_gating_default()
1526 btc_program_mgcg_hw_sequence(rdev, p, count); btc_mg_clock_gating_default()
1529 static void btc_mg_clock_gating_enable(struct radeon_device *rdev, btc_mg_clock_gating_enable() argument
1536 if (rdev->family == CHIP_BARTS) { btc_mg_clock_gating_enable()
1539 } else if (rdev->family == CHIP_TURKS) { btc_mg_clock_gating_enable()
1542 } else if (rdev->family == CHIP_CAICOS) { btc_mg_clock_gating_enable()
1548 if (rdev->family == CHIP_BARTS) { btc_mg_clock_gating_enable()
1551 } else if (rdev->family == CHIP_TURKS) { btc_mg_clock_gating_enable()
1554 } else if (rdev->family == CHIP_CAICOS) { btc_mg_clock_gating_enable()
1561 btc_program_mgcg_hw_sequence(rdev, p, count); btc_mg_clock_gating_enable()
1564 static void btc_ls_clock_gating_default(struct radeon_device *rdev) btc_ls_clock_gating_default() argument
1569 if (rdev->family == CHIP_BARTS) { btc_ls_clock_gating_default()
1572 } else if (rdev->family == CHIP_TURKS) { btc_ls_clock_gating_default()
1575 } else if (rdev->family == CHIP_CAICOS) { btc_ls_clock_gating_default()
1581 btc_program_mgcg_hw_sequence(rdev, p, count); btc_ls_clock_gating_default()
1584 static void btc_ls_clock_gating_enable(struct radeon_device *rdev, btc_ls_clock_gating_enable() argument
1591 if (rdev->family == CHIP_BARTS) { btc_ls_clock_gating_enable()
1594 } else if (rdev->family == CHIP_TURKS) { btc_ls_clock_gating_enable()
1597 } else if (rdev->family == CHIP_CAICOS) { btc_ls_clock_gating_enable()
1603 if (rdev->family == CHIP_BARTS) { btc_ls_clock_gating_enable()
1606 } else if (rdev->family == CHIP_TURKS) { btc_ls_clock_gating_enable()
1609 } else if (rdev->family == CHIP_CAICOS) { btc_ls_clock_gating_enable()
1616 btc_program_mgcg_hw_sequence(rdev, p, count); btc_ls_clock_gating_enable()
1619 bool btc_dpm_enabled(struct radeon_device *rdev) btc_dpm_enabled() argument
1621 if (rv770_is_smc_running(rdev)) btc_dpm_enabled()
1627 static int btc_init_smc_table(struct radeon_device *rdev, btc_init_smc_table() argument
1630 struct rv7xx_power_info *pi = rv770_get_pi(rdev); btc_init_smc_table()
1631 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_init_smc_table()
1637 cypress_populate_smc_voltage_tables(rdev, table); btc_init_smc_table()
1639 switch (rdev->pm.int_thermal_type) { btc_init_smc_table()
1652 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC) btc_init_smc_table()
1655 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT) btc_init_smc_table()
1658 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC) btc_init_smc_table()
1664 ret = cypress_populate_smc_initial_state(rdev, radeon_boot_state, table); btc_init_smc_table()
1672 ret = btc_populate_smc_acpi_state(rdev, table); btc_init_smc_table()
1677 ret = btc_populate_ulv_state(rdev, table); btc_init_smc_table()
1684 return rv770_copy_bytes_to_smc(rdev, btc_init_smc_table()
1691 static void btc_set_at_for_uvd(struct radeon_device *rdev, btc_set_at_for_uvd() argument
1694 struct rv7xx_power_info *pi = rv770_get_pi(rdev); btc_set_at_for_uvd()
1695 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_set_at_for_uvd()
1715 void btc_notify_uvd_to_smc(struct radeon_device *rdev, btc_notify_uvd_to_smc() argument
1718 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_notify_uvd_to_smc()
1721 rv770_write_smc_soft_register(rdev, btc_notify_uvd_to_smc()
1725 rv770_write_smc_soft_register(rdev, btc_notify_uvd_to_smc()
1731 int btc_reset_to_default(struct radeon_device *rdev) btc_reset_to_default() argument
1733 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_ResetToDefaults) != PPSMC_Result_OK) btc_reset_to_default()
1739 static void btc_stop_smc(struct radeon_device *rdev) btc_stop_smc() argument
1743 for (i = 0; i < rdev->usec_timeout; i++) { btc_stop_smc()
1750 r7xx_stop_smc(rdev); btc_stop_smc()
1753 void btc_read_arb_registers(struct radeon_device *rdev) btc_read_arb_registers() argument
1755 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_read_arb_registers()
1766 static void btc_set_arb0_registers(struct radeon_device *rdev, btc_set_arb0_registers() argument
1783 static void btc_set_boot_state_timing(struct radeon_device *rdev) btc_set_boot_state_timing() argument
1785 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_set_boot_state_timing()
1788 btc_set_arb0_registers(rdev, &eg_pi->bootup_arb_registers); btc_set_boot_state_timing()
1791 static bool btc_is_state_ulv_compatible(struct radeon_device *rdev, btc_is_state_ulv_compatible() argument
1795 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_is_state_ulv_compatible()
1810 static int btc_set_ulv_dram_timing(struct radeon_device *rdev) btc_set_ulv_dram_timing() argument
1813 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_set_ulv_dram_timing()
1816 radeon_atom_set_engine_dram_timings(rdev, btc_set_ulv_dram_timing()
1820 val = rv770_calculate_memory_refresh_rate(rdev, ulv_pl->sclk); btc_set_ulv_dram_timing()
1823 val = cypress_calculate_burst_time(rdev, ulv_pl->sclk, ulv_pl->mclk); btc_set_ulv_dram_timing()
1829 static int btc_enable_ulv(struct radeon_device *rdev) btc_enable_ulv() argument
1831 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableULV) != PPSMC_Result_OK) btc_enable_ulv()
1837 static int btc_set_power_state_conditionally_enable_ulv(struct radeon_device *rdev, btc_set_power_state_conditionally_enable_ulv() argument
1841 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_set_power_state_conditionally_enable_ulv()
1844 if (btc_is_state_ulv_compatible(rdev, radeon_new_state)) { btc_set_power_state_conditionally_enable_ulv()
1846 ret = btc_set_ulv_dram_timing(rdev); btc_set_power_state_conditionally_enable_ulv()
1848 ret = btc_enable_ulv(rdev); btc_set_power_state_conditionally_enable_ulv()
1916 static int btc_set_mc_special_registers(struct radeon_device *rdev, btc_set_mc_special_registers() argument
1919 struct rv7xx_power_info *pi = rv770_get_pi(rdev); btc_set_mc_special_registers()
2017 static int btc_initialize_mc_reg_table(struct radeon_device *rdev) btc_initialize_mc_reg_table() argument
2021 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_initialize_mc_reg_table()
2023 u8 module_index = rv770_get_memory_module_index(rdev); btc_initialize_mc_reg_table()
2042 ret = radeon_atom_init_mc_reg_table(rdev, module_index, table); btc_initialize_mc_reg_table()
2053 ret = btc_set_mc_special_registers(rdev, eg_table); btc_initialize_mc_reg_table()
2066 static void btc_init_stutter_mode(struct radeon_device *rdev) btc_init_stutter_mode() argument
2068 struct rv7xx_power_info *pi = rv770_get_pi(rdev); btc_init_stutter_mode()
2082 bool btc_dpm_vblank_too_short(struct radeon_device *rdev) btc_dpm_vblank_too_short() argument
2084 struct rv7xx_power_info *pi = rv770_get_pi(rdev); btc_dpm_vblank_too_short()
2085 u32 vblank_time = r600_dpm_get_vblank_time(rdev); btc_dpm_vblank_too_short()
2095 static void btc_apply_state_adjust_rules(struct radeon_device *rdev, btc_apply_state_adjust_rules() argument
2104 if ((rdev->pm.dpm.new_active_crtc_count > 1) || btc_apply_state_adjust_rules()
2105 btc_dpm_vblank_too_short(rdev)) btc_apply_state_adjust_rules()
2110 if (rdev->pm.dpm.ac_power) btc_apply_state_adjust_rules()
2111 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac; btc_apply_state_adjust_rules()
2113 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc; btc_apply_state_adjust_rules()
2115 if (rdev->pm.dpm.ac_power == false) { btc_apply_state_adjust_rules()
2164 btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk, btc_apply_state_adjust_rules()
2200 btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk, btc_apply_state_adjust_rules()
2202 btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk, btc_apply_state_adjust_rules()
2205 btc_adjust_clock_combinations(rdev, max_limits, &ps->low); btc_apply_state_adjust_rules()
2206 btc_adjust_clock_combinations(rdev, max_limits, &ps->medium); btc_apply_state_adjust_rules()
2207 btc_adjust_clock_combinations(rdev, max_limits, &ps->high); btc_apply_state_adjust_rules()
2209 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk, btc_apply_state_adjust_rules()
2211 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk, btc_apply_state_adjust_rules()
2213 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk, btc_apply_state_adjust_rules()
2215 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk, btc_apply_state_adjust_rules()
2216 rdev->clock.current_dispclk, max_limits->vddc, &ps->low.vddc); btc_apply_state_adjust_rules()
2218 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk, btc_apply_state_adjust_rules()
2220 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk, btc_apply_state_adjust_rules()
2222 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk, btc_apply_state_adjust_rules()
2224 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk, btc_apply_state_adjust_rules()
2225 rdev->clock.current_dispclk, max_limits->vddc, &ps->medium.vddc); btc_apply_state_adjust_rules()
2227 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk, btc_apply_state_adjust_rules()
2229 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk, btc_apply_state_adjust_rules()
2231 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk, btc_apply_state_adjust_rules()
2233 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk, btc_apply_state_adjust_rules()
2234 rdev->clock.current_dispclk, max_limits->vddc, &ps->high.vddc); btc_apply_state_adjust_rules()
2236 btc_apply_voltage_delta_rules(rdev, max_limits->vddc, max_limits->vddci, btc_apply_state_adjust_rules()
2238 btc_apply_voltage_delta_rules(rdev, max_limits->vddc, max_limits->vddci, btc_apply_state_adjust_rules()
2240 btc_apply_voltage_delta_rules(rdev, max_limits->vddc, max_limits->vddci, btc_apply_state_adjust_rules()
2243 if ((ps->high.vddc <= rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc) && btc_apply_state_adjust_rules()
2244 (ps->medium.vddc <= rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc) && btc_apply_state_adjust_rules()
2245 (ps->low.vddc <= rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)) btc_apply_state_adjust_rules()
2250 if (ps->low.vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2) btc_apply_state_adjust_rules()
2252 if (ps->medium.vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2) btc_apply_state_adjust_rules()
2254 if (ps->high.vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2) btc_apply_state_adjust_rules()
2258 static void btc_update_current_ps(struct radeon_device *rdev, btc_update_current_ps() argument
2262 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_update_current_ps()
2269 static void btc_update_requested_ps(struct radeon_device *rdev, btc_update_requested_ps() argument
2273 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_update_requested_ps()
2281 void btc_dpm_reset_asic(struct radeon_device *rdev) btc_dpm_reset_asic() argument
2283 rv770_restrict_performance_levels_before_switch(rdev); btc_dpm_reset_asic()
2284 btc_disable_ulv(rdev); btc_dpm_reset_asic()
2285 btc_set_boot_state_timing(rdev); btc_dpm_reset_asic()
2286 rv770_set_boot_state(rdev); btc_dpm_reset_asic()
2290 int btc_dpm_pre_set_power_state(struct radeon_device *rdev) btc_dpm_pre_set_power_state() argument
2292 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_dpm_pre_set_power_state()
2293 struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps; btc_dpm_pre_set_power_state()
2296 btc_update_requested_ps(rdev, new_ps); btc_dpm_pre_set_power_state()
2298 btc_apply_state_adjust_rules(rdev, &eg_pi->requested_rps); btc_dpm_pre_set_power_state()
2303 int btc_dpm_set_power_state(struct radeon_device *rdev) btc_dpm_set_power_state() argument
2305 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_dpm_set_power_state()
2310 ret = btc_disable_ulv(rdev); btc_dpm_set_power_state()
2311 btc_set_boot_state_timing(rdev); btc_dpm_set_power_state()
2312 ret = rv770_restrict_performance_levels_before_switch(rdev); btc_dpm_set_power_state()
2318 cypress_notify_link_speed_change_before_state_change(rdev, new_ps, old_ps); btc_dpm_set_power_state()
2320 rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps); btc_dpm_set_power_state()
2321 ret = rv770_halt_smc(rdev); btc_dpm_set_power_state()
2326 btc_set_at_for_uvd(rdev, new_ps); btc_dpm_set_power_state()
2328 btc_notify_uvd_to_smc(rdev, new_ps); btc_dpm_set_power_state()
2329 ret = cypress_upload_sw_state(rdev, new_ps); btc_dpm_set_power_state()
2335 ret = cypress_upload_mc_reg_table(rdev, new_ps); btc_dpm_set_power_state()
2342 cypress_program_memory_timing_parameters(rdev, new_ps); btc_dpm_set_power_state()
2344 ret = rv770_resume_smc(rdev); btc_dpm_set_power_state()
2349 ret = rv770_set_sw_state(rdev); btc_dpm_set_power_state()
2354 rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps); btc_dpm_set_power_state()
2357 cypress_notify_link_speed_change_after_state_change(rdev, new_ps, old_ps); btc_dpm_set_power_state()
2359 ret = btc_set_power_state_conditionally_enable_ulv(rdev, new_ps); btc_dpm_set_power_state()
2368 void btc_dpm_post_set_power_state(struct radeon_device *rdev) btc_dpm_post_set_power_state() argument
2370 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_dpm_post_set_power_state()
2373 btc_update_current_ps(rdev, new_ps); btc_dpm_post_set_power_state()
2376 int btc_dpm_enable(struct radeon_device *rdev) btc_dpm_enable() argument
2378 struct rv7xx_power_info *pi = rv770_get_pi(rdev); btc_dpm_enable()
2379 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_dpm_enable()
2380 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps; btc_dpm_enable()
2384 btc_cg_clock_gating_default(rdev); btc_dpm_enable()
2386 if (btc_dpm_enabled(rdev)) btc_dpm_enable()
2390 btc_mg_clock_gating_default(rdev); btc_dpm_enable()
2393 btc_ls_clock_gating_default(rdev); btc_dpm_enable()
2396 rv770_enable_voltage_control(rdev, true); btc_dpm_enable()
2397 ret = cypress_construct_voltage_tables(rdev); btc_dpm_enable()
2405 ret = cypress_get_mvdd_configuration(rdev); btc_dpm_enable()
2413 ret = btc_initialize_mc_reg_table(rdev); btc_dpm_enable()
2418 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS) btc_dpm_enable()
2419 rv770_enable_backbias(rdev, true); btc_dpm_enable()
2422 cypress_enable_spread_spectrum(rdev, true); btc_dpm_enable()
2425 rv770_enable_thermal_protection(rdev, true); btc_dpm_enable()
2427 rv770_setup_bsp(rdev); btc_dpm_enable()
2428 rv770_program_git(rdev); btc_dpm_enable()
2429 rv770_program_tp(rdev); btc_dpm_enable()
2430 rv770_program_tpp(rdev); btc_dpm_enable()
2431 rv770_program_sstp(rdev); btc_dpm_enable()
2432 rv770_program_engine_speed_parameters(rdev); btc_dpm_enable()
2433 cypress_enable_display_gap(rdev); btc_dpm_enable()
2434 rv770_program_vc(rdev); btc_dpm_enable()
2437 btc_enable_dynamic_pcie_gen2(rdev, true); btc_dpm_enable()
2439 ret = rv770_upload_firmware(rdev); btc_dpm_enable()
2444 ret = cypress_get_table_locations(rdev); btc_dpm_enable()
2449 ret = btc_init_smc_table(rdev, boot_ps); btc_dpm_enable()
2454 ret = cypress_populate_mc_reg_table(rdev, boot_ps); btc_dpm_enable()
2461 cypress_program_response_times(rdev); btc_dpm_enable()
2462 r7xx_start_smc(rdev); btc_dpm_enable()
2463 ret = cypress_notify_smc_display_change(rdev, false); btc_dpm_enable()
2468 cypress_enable_sclk_control(rdev, true); btc_dpm_enable()
2471 cypress_enable_mclk_control(rdev, true); btc_dpm_enable()
2473 cypress_start_dpm(rdev); btc_dpm_enable()
2476 btc_cg_clock_gating_enable(rdev, true); btc_dpm_enable()
2479 btc_mg_clock_gating_enable(rdev, true); btc_dpm_enable()
2482 btc_ls_clock_gating_enable(rdev, true); btc_dpm_enable()
2484 rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true); btc_dpm_enable()
2486 btc_init_stutter_mode(rdev); btc_dpm_enable()
2488 btc_update_current_ps(rdev, rdev->pm.dpm.boot_ps); btc_dpm_enable()
2493 void btc_dpm_disable(struct radeon_device *rdev) btc_dpm_disable() argument
2495 struct rv7xx_power_info *pi = rv770_get_pi(rdev); btc_dpm_disable()
2496 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_dpm_disable()
2498 if (!btc_dpm_enabled(rdev)) btc_dpm_disable()
2501 rv770_clear_vc(rdev); btc_dpm_disable()
2504 rv770_enable_thermal_protection(rdev, false); btc_dpm_disable()
2507 btc_enable_dynamic_pcie_gen2(rdev, false); btc_dpm_disable()
2509 if (rdev->irq.installed && btc_dpm_disable()
2510 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) { btc_dpm_disable()
2511 rdev->irq.dpm_thermal = false; btc_dpm_disable()
2512 radeon_irq_set(rdev); btc_dpm_disable()
2516 btc_cg_clock_gating_enable(rdev, false); btc_dpm_disable()
2519 btc_mg_clock_gating_enable(rdev, false); btc_dpm_disable()
2522 btc_ls_clock_gating_enable(rdev, false); btc_dpm_disable()
2524 rv770_stop_dpm(rdev); btc_dpm_disable()
2525 btc_reset_to_default(rdev); btc_dpm_disable()
2526 btc_stop_smc(rdev); btc_dpm_disable()
2527 cypress_enable_spread_spectrum(rdev, false); btc_dpm_disable()
2529 btc_update_current_ps(rdev, rdev->pm.dpm.boot_ps); btc_dpm_disable()
2532 void btc_dpm_setup_asic(struct radeon_device *rdev) btc_dpm_setup_asic() argument
2534 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_dpm_setup_asic()
2537 r = ni_mc_load_microcode(rdev); btc_dpm_setup_asic()
2540 rv770_get_memory_type(rdev); btc_dpm_setup_asic()
2541 rv740_read_clock_registers(rdev); btc_dpm_setup_asic()
2542 btc_read_arb_registers(rdev); btc_dpm_setup_asic()
2543 rv770_read_voltage_smio_registers(rdev); btc_dpm_setup_asic()
2546 cypress_advertise_gen2_capability(rdev); btc_dpm_setup_asic()
2548 rv770_get_pcie_gen2_status(rdev); btc_dpm_setup_asic()
2549 rv770_enable_acpi_pm(rdev); btc_dpm_setup_asic()
2552 int btc_dpm_init(struct radeon_device *rdev) btc_dpm_init() argument
2562 rdev->pm.dpm.priv = eg_pi; btc_dpm_init()
2565 rv770_get_max_vddc(rdev); btc_dpm_init()
2573 ret = r600_get_platform_caps(rdev); btc_dpm_init()
2577 ret = rv7xx_parse_power_table(rdev); btc_dpm_init()
2580 ret = r600_parse_extended_power_table(rdev); btc_dpm_init()
2584 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries = btc_dpm_init()
2586 if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) { btc_dpm_init()
2587 r600_free_extended_power_table(rdev); btc_dpm_init()
2590 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4; btc_dpm_init()
2591 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0; btc_dpm_init()
2592 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0; btc_dpm_init()
2593 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000; btc_dpm_init()
2594 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 800; btc_dpm_init()
2595 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000; btc_dpm_init()
2596 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 800; btc_dpm_init()
2597 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000; btc_dpm_init()
2598 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 800; btc_dpm_init()
2600 if (rdev->pm.dpm.voltage_response_time == 0) btc_dpm_init()
2601 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT; btc_dpm_init()
2602 if (rdev->pm.dpm.backbias_response_time == 0) btc_dpm_init()
2603 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT; btc_dpm_init()
2605 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, btc_dpm_init()
2634 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0); btc_dpm_init()
2637 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0); btc_dpm_init()
2640 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0); btc_dpm_init()
2642 rv770_get_engine_memory_ss(rdev); btc_dpm_init()
2659 if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE) btc_dpm_init()
2666 if (rdev->flags & RADEON_IS_MOBILITY) btc_dpm_init()
2680 radeon_acpi_is_pcie_performance_request_supported(rdev); btc_dpm_init()
2685 if (rdev->family == CHIP_BARTS) btc_dpm_init()
2691 if (ASIC_IS_LOMBOK(rdev)) btc_dpm_init()
2698 rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 4; btc_dpm_init()
2699 rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200; btc_dpm_init()
2700 rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900; btc_dpm_init()
2701 rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk); btc_dpm_init()
2702 rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk; btc_dpm_init()
2703 rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0; btc_dpm_init()
2704 rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL; btc_dpm_init()
2706 if (rdev->family == CHIP_TURKS) btc_dpm_init()
2707 rdev->pm.dpm.dyn_state.sclk_mclk_delta = 15000; btc_dpm_init()
2709 rdev->pm.dpm.dyn_state.sclk_mclk_delta = 10000; btc_dpm_init()
2712 if ((rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk == 0) || btc_dpm_init()
2713 (rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk == 0)) btc_dpm_init()
2714 rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc = btc_dpm_init()
2715 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac; btc_dpm_init()
2720 void btc_dpm_fini(struct radeon_device *rdev) btc_dpm_fini() argument
2724 for (i = 0; i < rdev->pm.dpm.num_ps; i++) { btc_dpm_fini()
2725 kfree(rdev->pm.dpm.ps[i].ps_priv); btc_dpm_fini()
2727 kfree(rdev->pm.dpm.ps); btc_dpm_fini()
2728 kfree(rdev->pm.dpm.priv); btc_dpm_fini()
2729 kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries); btc_dpm_fini()
2730 r600_free_extended_power_table(rdev); btc_dpm_fini()
2733 void btc_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev, btc_dpm_debugfs_print_current_performance_level() argument
2736 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_dpm_debugfs_print_current_performance_level()
2759 u32 btc_dpm_get_current_sclk(struct radeon_device *rdev) btc_dpm_get_current_sclk() argument
2761 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_dpm_get_current_sclk()
2782 u32 btc_dpm_get_current_mclk(struct radeon_device *rdev) btc_dpm_get_current_mclk() argument
2784 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_dpm_get_current_mclk()
2805 u32 btc_dpm_get_sclk(struct radeon_device *rdev, bool low) btc_dpm_get_sclk() argument
2807 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_dpm_get_sclk()
2816 u32 btc_dpm_get_mclk(struct radeon_device *rdev, bool low) btc_dpm_get_mclk() argument
2818 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); btc_dpm_get_mclk()
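The btc_dpm_pre_set_power_state(), btc_dpm_set_power_state(), and btc_dpm_post_set_power_state() hits above show a three-phase switch: stage and adjust the requested state, halt the SMC and program the hardware, then commit the bookkeeping. A minimal userspace sketch of that shape follows; every type and helper in it (struct ps, apply_adjust_rules, and so on) is an invented stand-in, not the driver's API.

/* Three-phase power-state switch, modeled after the pre/set/post trio. */
#include <stdio.h>

struct ps { int sclk, mclk, vddc; };          /* stand-in power state */
static struct ps current_ps, requested_ps;

static void apply_adjust_rules(struct ps *p)  /* clamp to fake limits */
{
	if (p->sclk > 80000) p->sclk = 80000;
	if (p->mclk > 100000) p->mclk = 100000;
}

static int pre_set(struct ps new_ps)          /* phase 1: stage + adjust */
{
	requested_ps = new_ps;
	apply_adjust_rules(&requested_ps);
	return 0;
}

static int set(void)                          /* phase 2: program hardware */
{
	printf("halt SMC, upload sclk=%d mclk=%d, resume SMC\n",
	       requested_ps.sclk, requested_ps.mclk);
	return 0;
}

static void post_set(void)                    /* phase 3: commit bookkeeping */
{
	current_ps = requested_ps;
}

int main(void)
{
	struct ps want = { .sclk = 90000, .mclk = 90000, .vddc = 1100 };

	if (!pre_set(want) && !set())
		post_set();
	printf("current sclk=%d\n", current_ps.sclk);
	return 0;
}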
H A Dkv_smc.c30 int kv_notify_message_to_smu(struct radeon_device *rdev, u32 id) kv_notify_message_to_smu() argument
37 for (i = 0; i < rdev->usec_timeout; i++) { kv_notify_message_to_smu()
54 int kv_dpm_get_enable_mask(struct radeon_device *rdev, u32 *enable_mask) kv_dpm_get_enable_mask() argument
58 ret = kv_notify_message_to_smu(rdev, PPSMC_MSG_SCLKDPM_GetEnabledMask); kv_dpm_get_enable_mask()
66 int kv_send_msg_to_smc_with_parameter(struct radeon_device *rdev, kv_send_msg_to_smc_with_parameter() argument
72 return kv_notify_message_to_smu(rdev, msg); kv_send_msg_to_smc_with_parameter()
75 static int kv_set_smc_sram_address(struct radeon_device *rdev, kv_set_smc_sram_address() argument
89 int kv_read_smc_sram_dword(struct radeon_device *rdev, u32 smc_address, kv_read_smc_sram_dword() argument
94 ret = kv_set_smc_sram_address(rdev, smc_address, limit); kv_read_smc_sram_dword()
102 int kv_smc_dpm_enable(struct radeon_device *rdev, bool enable) kv_smc_dpm_enable() argument
105 return kv_notify_message_to_smu(rdev, PPSMC_MSG_DPM_Enable); kv_smc_dpm_enable()
107 return kv_notify_message_to_smu(rdev, PPSMC_MSG_DPM_Disable); kv_smc_dpm_enable()
110 int kv_smc_bapm_enable(struct radeon_device *rdev, bool enable) kv_smc_bapm_enable() argument
113 return kv_notify_message_to_smu(rdev, PPSMC_MSG_EnableBAPM); kv_smc_bapm_enable()
115 return kv_notify_message_to_smu(rdev, PPSMC_MSG_DisableBAPM); kv_smc_bapm_enable()
118 int kv_copy_bytes_to_smc(struct radeon_device *rdev, kv_copy_bytes_to_smc() argument
135 ret = kv_set_smc_sram_address(rdev, addr, limit); kv_copy_bytes_to_smc()
161 ret = kv_set_smc_sram_address(rdev, addr, limit); kv_copy_bytes_to_smc()
174 ret = kv_set_smc_sram_address(rdev, addr, limit); kv_copy_bytes_to_smc()
189 ret = kv_set_smc_sram_address(rdev, addr, limit); kv_copy_bytes_to_smc()
207 ret = kv_set_smc_sram_address(rdev, addr, limit); kv_copy_bytes_to_smc()
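kv_notify_message_to_smu() polls up to rdev->usec_timeout iterations after posting a message, the classic mailbox handshake also used by kv_smc_dpm_enable() and kv_smc_bapm_enable(). The sketch below simulates that handshake in userspace; the register names and the fake "answers on the third read" behaviour are invented for illustration.

#include <stdio.h>

static unsigned int fake_msg_reg, fake_resp_reg;

static void wreg(unsigned int *r, unsigned int v) { *r = v; }

static unsigned int rreg(unsigned int *r)
{
	/* pretend the firmware acknowledges on the third read */
	static int reads;
	return (*r == 0 && ++reads >= 3) ? (*r = 1) : *r;
}

static int notify_smu(unsigned int id, unsigned int usec_timeout)
{
	unsigned int i;

	wreg(&fake_resp_reg, 0);           /* clear the response mailbox */
	wreg(&fake_msg_reg, id);           /* post the message id */
	for (i = 0; i < usec_timeout; i++) {
		if (rreg(&fake_resp_reg) != 0)
			return 0;          /* firmware acknowledged */
		/* the driver would udelay(1) here */
	}
	return -1;                         /* -ETIMEDOUT in the kernel */
}

int main(void)
{
	printf("notify_smu -> %d\n", notify_smu(0x10, 100000));
	return 0;
}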
H A Dradeon_sync.c89 int radeon_sync_resv(struct radeon_device *rdev, radeon_sync_resv() argument
103 if (fence && fence->rdev == rdev) radeon_sync_resv()
116 if (fence && fence->rdev == rdev) radeon_sync_resv()
130 * @rdev: radeon_device pointer
137 int radeon_sync_rings(struct radeon_device *rdev, radeon_sync_rings() argument
153 if (!rdev->ring[i].ready) { radeon_sync_rings()
154 dev_err(rdev->dev, "Syncing to a disabled ring!"); radeon_sync_rings()
165 r = radeon_semaphore_create(rdev, &semaphore); radeon_sync_rings()
172 r = radeon_ring_alloc(rdev, &rdev->ring[i], 16); radeon_sync_rings()
177 if (!radeon_semaphore_emit_signal(rdev, i, semaphore)) { radeon_sync_rings()
179 radeon_ring_undo(&rdev->ring[i]); radeon_sync_rings()
187 if (!radeon_semaphore_emit_wait(rdev, ring, semaphore)) { radeon_sync_rings()
189 radeon_ring_undo(&rdev->ring[i]); radeon_sync_rings()
196 radeon_ring_commit(rdev, &rdev->ring[i], false); radeon_sync_rings()
206 * @rdev: radeon_device pointer
212 void radeon_sync_free(struct radeon_device *rdev, radeon_sync_free() argument
219 radeon_semaphore_free(rdev, &sync->semaphores[i], fence); radeon_sync_free()
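radeon_sync_rings() pairs a semaphore signal on the ring that owns a fence with a wait on the ring about to run, so the hardware orders the two command streams. A toy model of that pairing, with simulated rings and fences:

#include <stdio.h>

#define NUM_RINGS 3

static int pending_fence[NUM_RINGS] = { 0, 7, 0 };  /* fake fence seq per ring */

static void emit_signal(int ring) { printf("ring %d: SIGNAL sem\n", ring); }
static void emit_wait(int ring)   { printf("ring %d: WAIT sem\n", ring); }

static void sync_rings(int target)
{
	int i;

	for (i = 0; i < NUM_RINGS; i++) {
		if (i == target || !pending_fence[i])
			continue;     /* nothing to wait for on this ring */
		emit_signal(i);       /* producer ring raises the semaphore */
		emit_wait(target);    /* consumer ring blocks until raised */
	}
}

int main(void)
{
	sync_rings(0);
	return 0;
}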
H A Dradeon_clocks.c35 uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev) radeon_legacy_get_engine_clock() argument
37 struct radeon_pll *spll = &rdev->clock.spll; radeon_legacy_get_engine_clock()
65 uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev) radeon_legacy_get_memory_clock() argument
67 struct radeon_pll *mpll = &rdev->clock.mpll; radeon_legacy_get_memory_clock()
101 struct radeon_device *rdev = dev->dev_private; radeon_read_clocks_OF() local
102 struct device_node *dp = rdev->pdev->dev.of_node; radeon_read_clocks_OF()
104 struct radeon_pll *p1pll = &rdev->clock.p1pll; radeon_read_clocks_OF()
105 struct radeon_pll *p2pll = &rdev->clock.p2pll; radeon_read_clocks_OF()
106 struct radeon_pll *spll = &rdev->clock.spll; radeon_read_clocks_OF()
107 struct radeon_pll *mpll = &rdev->clock.mpll; radeon_read_clocks_OF()
123 if (rdev->family >= CHIP_R420) { radeon_read_clocks_OF()
143 rdev->clock.max_pixel_clock = 35000; radeon_read_clocks_OF()
152 rdev->clock.default_sclk = (*val) / 10; radeon_read_clocks_OF()
154 rdev->clock.default_sclk = radeon_read_clocks_OF()
155 radeon_legacy_get_engine_clock(rdev); radeon_read_clocks_OF()
159 rdev->clock.default_mclk = (*val) / 10; radeon_read_clocks_OF()
161 rdev->clock.default_mclk = radeon_read_clocks_OF()
162 radeon_legacy_get_memory_clock(rdev); radeon_read_clocks_OF()
177 struct radeon_device *rdev = dev->dev_private; radeon_get_clock_info() local
178 struct radeon_pll *p1pll = &rdev->clock.p1pll; radeon_get_clock_info()
179 struct radeon_pll *p2pll = &rdev->clock.p2pll; radeon_get_clock_info()
180 struct radeon_pll *dcpll = &rdev->clock.dcpll; radeon_get_clock_info()
181 struct radeon_pll *spll = &rdev->clock.spll; radeon_get_clock_info()
182 struct radeon_pll *mpll = &rdev->clock.mpll; radeon_get_clock_info()
185 if (rdev->is_atom_bios) radeon_get_clock_info()
194 if (!ASIC_IS_AVIVO(rdev)) { radeon_get_clock_info()
196 if (ASIC_IS_R300(rdev)) radeon_get_clock_info()
208 if (rdev->family < CHIP_RS600) { radeon_get_clock_info()
217 if (ASIC_IS_AVIVO(rdev)) { radeon_get_clock_info()
223 rdev->clock.max_pixel_clock = 35000; radeon_get_clock_info()
225 if (rdev->flags & RADEON_IS_IGP) { radeon_get_clock_info()
242 if (rdev->family >= CHIP_R420) { radeon_get_clock_info()
266 rdev->clock.default_sclk = radeon_get_clock_info()
267 radeon_legacy_get_engine_clock(rdev); radeon_get_clock_info()
268 rdev->clock.default_mclk = radeon_get_clock_info()
269 radeon_legacy_get_memory_clock(rdev); radeon_get_clock_info()
274 if (ASIC_IS_AVIVO(rdev)) { radeon_get_clock_info()
335 if (!rdev->clock.default_sclk) radeon_get_clock_info()
336 rdev->clock.default_sclk = radeon_get_engine_clock(rdev); radeon_get_clock_info()
337 if ((!rdev->clock.default_mclk) && rdev->asic->pm.get_memory_clock) radeon_get_clock_info()
338 rdev->clock.default_mclk = radeon_get_memory_clock(rdev); radeon_get_clock_info()
340 rdev->pm.current_sclk = rdev->clock.default_sclk; radeon_get_clock_info()
341 rdev->pm.current_mclk = rdev->clock.default_mclk; radeon_get_clock_info()
346 static uint32_t calc_eng_mem_clock(struct radeon_device *rdev, calc_eng_mem_clock() argument
350 struct radeon_pll *spll = &rdev->clock.spll; calc_eng_mem_clock()
385 void radeon_legacy_set_engine_clock(struct radeon_device *rdev, radeon_legacy_set_engine_clock() argument
393 eng_clock = calc_eng_mem_clock(rdev, eng_clock, &fb_div, &post_div); radeon_legacy_set_engine_clock()
471 void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable) radeon_legacy_set_clock_gating() argument
476 if (rdev->flags & RADEON_SINGLE_CRTC) { radeon_legacy_set_clock_gating()
492 } else if (ASIC_IS_R300(rdev)) { radeon_legacy_set_clock_gating()
493 if ((rdev->family == CHIP_RS400) || radeon_legacy_set_clock_gating()
494 (rdev->family == CHIP_RS480)) { radeon_legacy_set_clock_gating()
541 } else if (rdev->family >= CHIP_RV350) { radeon_legacy_set_clock_gating()
616 if (rdev->mc.vram_width == 64) { radeon_legacy_set_clock_gating()
669 if (((rdev->family == CHIP_RV250) && radeon_legacy_set_clock_gating()
673 || ((rdev->family == CHIP_RV100) radeon_legacy_set_clock_gating()
684 if ((rdev->family == CHIP_RV200) || radeon_legacy_set_clock_gating()
685 (rdev->family == CHIP_RV250) || radeon_legacy_set_clock_gating()
686 (rdev->family == CHIP_RV280)) { radeon_legacy_set_clock_gating()
691 if (((rdev->family == CHIP_RV200) || radeon_legacy_set_clock_gating()
692 (rdev->family == CHIP_RV250)) && radeon_legacy_set_clock_gating()
703 if (((rdev->family == CHIP_RV200) || radeon_legacy_set_clock_gating()
704 (rdev->family == CHIP_RV250)) && radeon_legacy_set_clock_gating()
736 if (rdev->flags & RADEON_SINGLE_CRTC) { radeon_legacy_set_clock_gating()
746 } else if ((rdev->family == CHIP_RS400) || radeon_legacy_set_clock_gating()
747 (rdev->family == CHIP_RS480)) { radeon_legacy_set_clock_gating()
785 } else if (rdev->family >= CHIP_RV350) { radeon_legacy_set_clock_gating()
841 if (rdev->flags & RADEON_SINGLE_CRTC) { radeon_legacy_set_clock_gating()
853 } else if ((rdev->family == CHIP_R300) || radeon_legacy_set_clock_gating()
854 (rdev->family == CHIP_R350)) { radeon_legacy_set_clock_gating()
866 if ((rdev->family == CHIP_R300) || radeon_legacy_set_clock_gating()
867 (rdev->family == CHIP_R350)) { radeon_legacy_set_clock_gating()
876 if (rdev->flags & RADEON_IS_IGP) { radeon_legacy_set_clock_gating()
884 if ((rdev->family == CHIP_RV200) || radeon_legacy_set_clock_gating()
885 (rdev->family == CHIP_RV250) || radeon_legacy_set_clock_gating()
886 (rdev->family == CHIP_RV280)) { radeon_legacy_set_clock_gating()
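The legacy clock readback in radeon_legacy_get_engine_clock()/radeon_legacy_get_memory_clock() ultimately reduces to the usual PLL relation, output = reference * feedback divider / (reference divider * post divider). A hedged arithmetic sketch, with divider values made up for illustration:

#include <stdio.h>

static unsigned int pll_out_khz(unsigned int ref_khz, unsigned int fb_div,
				unsigned int ref_div, unsigned int post_div)
{
	/* widen before multiplying so large feedback dividers don't overflow */
	return (unsigned int)((unsigned long long)ref_khz * fb_div /
			      (ref_div * post_div));
}

int main(void)
{
	/* e.g. 27 MHz reference, fb=200, ref=6, post=2 -> 450 MHz */
	printf("%u kHz\n", pll_out_khz(27000, 200, 6, 2));
	return 0;
}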
H A Dradeon_benchmark.c35 static int radeon_benchmark_do_move(struct radeon_device *rdev, unsigned size, radeon_benchmark_do_move() argument
49 fence = radeon_copy_dma(rdev, saddr, daddr, radeon_benchmark_do_move()
54 fence = radeon_copy_blit(rdev, saddr, daddr, radeon_benchmark_do_move()
87 static void radeon_benchmark_move(struct radeon_device *rdev, unsigned size, radeon_benchmark_move() argument
97 r = radeon_bo_create(rdev, size, PAGE_SIZE, true, sdomain, 0, NULL, NULL, &sobj); radeon_benchmark_move()
109 r = radeon_bo_create(rdev, size, PAGE_SIZE, true, ddomain, 0, NULL, NULL, &dobj); radeon_benchmark_move()
122 if (rdev->asic->copy.dma) { radeon_benchmark_move()
123 time = radeon_benchmark_do_move(rdev, size, saddr, daddr, radeon_benchmark_move()
133 if (rdev->asic->copy.blit) { radeon_benchmark_move()
134 time = radeon_benchmark_do_move(rdev, size, saddr, daddr, radeon_benchmark_move()
167 void radeon_benchmark(struct radeon_device *rdev, int test_number) radeon_benchmark() argument
193 radeon_benchmark_move(rdev, 1024*1024, RADEON_GEM_DOMAIN_GTT, radeon_benchmark()
195 radeon_benchmark_move(rdev, 1024*1024, RADEON_GEM_DOMAIN_VRAM, radeon_benchmark()
200 radeon_benchmark_move(rdev, 1024*1024, RADEON_GEM_DOMAIN_VRAM, radeon_benchmark()
206 radeon_benchmark_move(rdev, i * RADEON_GPU_PAGE_SIZE, radeon_benchmark()
213 radeon_benchmark_move(rdev, i * RADEON_GPU_PAGE_SIZE, radeon_benchmark()
220 radeon_benchmark_move(rdev, i * RADEON_GPU_PAGE_SIZE, radeon_benchmark()
227 radeon_benchmark_move(rdev, common_modes[i], radeon_benchmark()
234 radeon_benchmark_move(rdev, common_modes[i], radeon_benchmark()
241 radeon_benchmark_move(rdev, common_modes[i], radeon_benchmark()
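radeon_benchmark_do_move() times n copies of a buffer and reports throughput. The userspace sketch below reproduces only the bookkeeping, substituting memcpy() for the DMA/blit paths the listing exercises:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>

int main(void)
{
	const size_t size = 1024 * 1024;   /* 1 MiB per move */
	const int n = 64;                  /* number of moves to time */
	char *src = malloc(size), *dst = malloc(size);
	struct timespec t0, t1;
	double secs;
	int i;

	if (!src || !dst)
		return 1;
	memset(src, 0xab, size);
	clock_gettime(CLOCK_MONOTONIC, &t0);
	for (i = 0; i < n; i++)
		memcpy(dst, src, size);    /* kernel path: DMA or blit copy */
	clock_gettime(CLOCK_MONOTONIC, &t1);

	secs = (t1.tv_sec - t0.tv_sec) + (t1.tv_nsec - t0.tv_nsec) / 1e9;
	printf("%.1f MB/s\n", (double)size * n / (1024 * 1024) / secs);
	free(src);
	free(dst);
	return 0;
}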
H A Dcik_sdma.c36 u32 cik_gpu_check_soft_reset(struct radeon_device *rdev);
58 * @rdev: radeon_device pointer
63 uint32_t cik_sdma_get_rptr(struct radeon_device *rdev, cik_sdma_get_rptr() argument
68 if (rdev->wb.enabled) { cik_sdma_get_rptr()
69 rptr = rdev->wb.wb[ring->rptr_offs/4]; cik_sdma_get_rptr()
85 * @rdev: radeon_device pointer
90 uint32_t cik_sdma_get_wptr(struct radeon_device *rdev, cik_sdma_get_wptr() argument
106 * @rdev: radeon_device pointer
111 void cik_sdma_set_wptr(struct radeon_device *rdev, cik_sdma_set_wptr() argument
128 * @rdev: radeon_device pointer
133 void cik_sdma_ring_ib_execute(struct radeon_device *rdev, cik_sdma_ring_ib_execute() argument
136 struct radeon_ring *ring = &rdev->ring[ib->ring]; cik_sdma_ring_ib_execute()
139 if (rdev->wb.enabled) { cik_sdma_ring_ib_execute()
164 * @rdev: radeon_device pointer
169 static void cik_sdma_hdp_flush_ring_emit(struct radeon_device *rdev, cik_sdma_hdp_flush_ring_emit() argument
172 struct radeon_ring *ring = &rdev->ring[ridx]; cik_sdma_hdp_flush_ring_emit()
193 * @rdev: radeon_device pointer
200 void cik_sdma_fence_ring_emit(struct radeon_device *rdev, cik_sdma_fence_ring_emit() argument
203 struct radeon_ring *ring = &rdev->ring[fence->ring]; cik_sdma_fence_ring_emit()
204 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; cik_sdma_fence_ring_emit()
214 cik_sdma_hdp_flush_ring_emit(rdev, fence->ring); cik_sdma_fence_ring_emit()
220 * @rdev: radeon_device pointer
228 bool cik_sdma_semaphore_ring_emit(struct radeon_device *rdev, cik_sdma_semaphore_ring_emit() argument
246 * @rdev: radeon_device pointer
250 static void cik_sdma_gfx_stop(struct radeon_device *rdev) cik_sdma_gfx_stop() argument
255 if ((rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX) || cik_sdma_gfx_stop()
256 (rdev->asic->copy.copy_ring_index == CAYMAN_RING_TYPE_DMA1_INDEX)) cik_sdma_gfx_stop()
257 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size); cik_sdma_gfx_stop()
269 rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false; cik_sdma_gfx_stop()
270 rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX].ready = false; cik_sdma_gfx_stop()
287 * @rdev: radeon_device pointer
291 static void cik_sdma_rlc_stop(struct radeon_device *rdev) cik_sdma_rlc_stop() argument
299 * @rdev: radeon_device pointer
304 static void cik_sdma_ctx_switch_enable(struct radeon_device *rdev, bool enable) cik_sdma_ctx_switch_enable() argument
326 * @rdev: radeon_device pointer
331 void cik_sdma_enable(struct radeon_device *rdev, bool enable) cik_sdma_enable() argument
337 cik_sdma_gfx_stop(rdev); cik_sdma_enable()
338 cik_sdma_rlc_stop(rdev); cik_sdma_enable()
354 cik_sdma_ctx_switch_enable(rdev, enable); cik_sdma_enable()
360 * @rdev: radeon_device pointer
365 static int cik_sdma_gfx_resume(struct radeon_device *rdev) cik_sdma_gfx_resume() argument
375 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; cik_sdma_gfx_resume()
379 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; cik_sdma_gfx_resume()
401 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); cik_sdma_gfx_resume()
403 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); cik_sdma_gfx_resume()
405 if (rdev->wb.enabled) cik_sdma_gfx_resume()
426 r = radeon_ring_test(rdev, ring->idx, ring); cik_sdma_gfx_resume()
433 if ((rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX) || cik_sdma_gfx_resume()
434 (rdev->asic->copy.copy_ring_index == CAYMAN_RING_TYPE_DMA1_INDEX)) cik_sdma_gfx_resume()
435 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size); cik_sdma_gfx_resume()
443 * @rdev: radeon_device pointer
448 static int cik_sdma_rlc_resume(struct radeon_device *rdev) cik_sdma_rlc_resume() argument
457 * @rdev: radeon_device pointer
462 static int cik_sdma_load_microcode(struct radeon_device *rdev) cik_sdma_load_microcode() argument
466 if (!rdev->sdma_fw) cik_sdma_load_microcode()
470 cik_sdma_enable(rdev, false); cik_sdma_load_microcode()
472 if (rdev->new_fw) { cik_sdma_load_microcode()
474 (const struct sdma_firmware_header_v1_0 *)rdev->sdma_fw->data; cik_sdma_load_microcode()
482 (rdev->sdma_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); cik_sdma_load_microcode()
491 (rdev->sdma_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); cik_sdma_load_microcode()
501 fw_data = (const __be32 *)rdev->sdma_fw->data; cik_sdma_load_microcode()
508 fw_data = (const __be32 *)rdev->sdma_fw->data; cik_sdma_load_microcode()
523 * @rdev: radeon_device pointer
528 int cik_sdma_resume(struct radeon_device *rdev) cik_sdma_resume() argument
532 r = cik_sdma_load_microcode(rdev); cik_sdma_resume()
537 cik_sdma_enable(rdev, true); cik_sdma_resume()
540 r = cik_sdma_gfx_resume(rdev); cik_sdma_resume()
543 r = cik_sdma_rlc_resume(rdev); cik_sdma_resume()
553 * @rdev: radeon_device pointer
557 void cik_sdma_fini(struct radeon_device *rdev) cik_sdma_fini() argument
560 cik_sdma_enable(rdev, false); cik_sdma_fini()
561 radeon_ring_fini(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX]); cik_sdma_fini()
562 radeon_ring_fini(rdev, &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]); cik_sdma_fini()
569 * @rdev: radeon_device pointer
579 struct radeon_fence *cik_copy_dma(struct radeon_device *rdev, cik_copy_dma() argument
586 int ring_index = rdev->asic->copy.dma_ring_index; cik_copy_dma()
587 struct radeon_ring *ring = &rdev->ring[ring_index]; cik_copy_dma()
596 r = radeon_ring_lock(rdev, ring, num_loops * 7 + 14); cik_copy_dma()
599 radeon_sync_free(rdev, &sync, NULL); cik_copy_dma()
603 radeon_sync_resv(rdev, &sync, resv, false); cik_copy_dma()
604 radeon_sync_rings(rdev, &sync, ring->idx); cik_copy_dma()
622 r = radeon_fence_emit(rdev, &fence, ring->idx); cik_copy_dma()
624 radeon_ring_unlock_undo(rdev, ring); cik_copy_dma()
625 radeon_sync_free(rdev, &sync, NULL); cik_copy_dma()
629 radeon_ring_unlock_commit(rdev, ring, false); cik_copy_dma()
630 radeon_sync_free(rdev, &sync, fence); cik_copy_dma()
638 * @rdev: radeon_device pointer
645 int cik_sdma_ring_test(struct radeon_device *rdev, cik_sdma_ring_test() argument
659 gpu_addr = rdev->wb.gpu_addr + index; cik_sdma_ring_test()
662 rdev->wb.wb[index/4] = cpu_to_le32(tmp); cik_sdma_ring_test()
664 r = radeon_ring_lock(rdev, ring, 5); cik_sdma_ring_test()
674 radeon_ring_unlock_commit(rdev, ring, false); cik_sdma_ring_test()
676 for (i = 0; i < rdev->usec_timeout; i++) { cik_sdma_ring_test()
677 tmp = le32_to_cpu(rdev->wb.wb[index/4]); cik_sdma_ring_test()
683 if (i < rdev->usec_timeout) { cik_sdma_ring_test()
696 * @rdev: radeon_device pointer
702 int cik_sdma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring) cik_sdma_ib_test() argument
716 gpu_addr = rdev->wb.gpu_addr + index; cik_sdma_ib_test()
719 rdev->wb.wb[index/4] = cpu_to_le32(tmp); cik_sdma_ib_test()
721 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256); cik_sdma_ib_test()
734 r = radeon_ib_schedule(rdev, &ib, NULL, false); cik_sdma_ib_test()
736 radeon_ib_free(rdev, &ib); cik_sdma_ib_test()
745 for (i = 0; i < rdev->usec_timeout; i++) { cik_sdma_ib_test()
746 tmp = le32_to_cpu(rdev->wb.wb[index/4]); cik_sdma_ib_test()
751 if (i < rdev->usec_timeout) { cik_sdma_ib_test()
757 radeon_ib_free(rdev, &ib); cik_sdma_ib_test()
764 * @rdev: radeon_device pointer
770 bool cik_sdma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) cik_sdma_is_lockup() argument
772 u32 reset_mask = cik_gpu_check_soft_reset(rdev); cik_sdma_is_lockup()
781 radeon_ring_lockup_update(rdev, ring); cik_sdma_is_lockup()
784 return radeon_ring_test_lockup(rdev, ring); cik_sdma_is_lockup()
790 * @rdev: radeon_device pointer
798 void cik_sdma_vm_copy_pages(struct radeon_device *rdev, cik_sdma_vm_copy_pages() argument
826 * @rdev: radeon_device pointer
836 void cik_sdma_vm_write_pages(struct radeon_device *rdev, cik_sdma_vm_write_pages() argument
858 value = radeon_vm_map_gart(rdev, addr); cik_sdma_vm_write_pages()
875 * @rdev: radeon_device pointer
885 void cik_sdma_vm_set_pages(struct radeon_device *rdev, cik_sdma_vm_set_pages() argument
937 * @rdev: radeon_device pointer
942 void cik_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring, cik_dma_vm_flush() argument
982 cik_sdma_hdp_flush_ring_emit(rdev, ring->idx); cik_dma_vm_flush()
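cik_sdma_ring_test() seeds a writeback word with a marker, submits a write packet, and polls until the engine overwrites it or rdev->usec_timeout expires. A simulated version of that probe (the "engine" below is a synchronous stand-in, so the poll succeeds immediately):

#include <stdio.h>

static volatile unsigned int wb_slot;  /* stand-in for the writeback word */

static void fake_engine_execute(unsigned int value)
{
	/* the real engine consumes an SDMA write packet from the ring;
	 * here we just perform the store it would do */
	wb_slot = value;
}

static int ring_test(unsigned int usec_timeout)
{
	const unsigned int magic = 0xDEADBEEF;
	unsigned int i;

	wb_slot = 0xCAFEDEAD;             /* marker: "not written yet" */
	fake_engine_execute(magic);       /* submit + commit the packet */
	for (i = 0; i < usec_timeout; i++) {
		if (wb_slot == magic)
			return 0;         /* engine is alive */
	}
	return -1;                        /* never overwritten: dead engine */
}

int main(void)
{
	printf("ring test %s\n", ring_test(1000) == 0 ? "passed" : "failed");
	return 0;
}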
H A Dradeon_asic.c45 * @rdev: radeon device pointer
52 static uint32_t radeon_invalid_rreg(struct radeon_device *rdev, uint32_t reg) radeon_invalid_rreg() argument
62 * @rdev: radeon device pointer
69 static void radeon_invalid_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) radeon_invalid_wreg() argument
79 * @rdev: radeon device pointer
84 static void radeon_register_accessor_init(struct radeon_device *rdev) radeon_register_accessor_init() argument
86 rdev->mc_rreg = &radeon_invalid_rreg; radeon_register_accessor_init()
87 rdev->mc_wreg = &radeon_invalid_wreg; radeon_register_accessor_init()
88 rdev->pll_rreg = &radeon_invalid_rreg; radeon_register_accessor_init()
89 rdev->pll_wreg = &radeon_invalid_wreg; radeon_register_accessor_init()
90 rdev->pciep_rreg = &radeon_invalid_rreg; radeon_register_accessor_init()
91 rdev->pciep_wreg = &radeon_invalid_wreg; radeon_register_accessor_init()
94 if (rdev->family < CHIP_RV515) { radeon_register_accessor_init()
95 rdev->pcie_reg_mask = 0xff; radeon_register_accessor_init()
97 rdev->pcie_reg_mask = 0x7ff; radeon_register_accessor_init()
100 if (rdev->family <= CHIP_R580) { radeon_register_accessor_init()
101 rdev->pll_rreg = &r100_pll_rreg; radeon_register_accessor_init()
102 rdev->pll_wreg = &r100_pll_wreg; radeon_register_accessor_init()
104 if (rdev->family >= CHIP_R420) { radeon_register_accessor_init()
105 rdev->mc_rreg = &r420_mc_rreg; radeon_register_accessor_init()
106 rdev->mc_wreg = &r420_mc_wreg; radeon_register_accessor_init()
108 if (rdev->family >= CHIP_RV515) { radeon_register_accessor_init()
109 rdev->mc_rreg = &rv515_mc_rreg; radeon_register_accessor_init()
110 rdev->mc_wreg = &rv515_mc_wreg; radeon_register_accessor_init()
112 if (rdev->family == CHIP_RS400 || rdev->family == CHIP_RS480) { radeon_register_accessor_init()
113 rdev->mc_rreg = &rs400_mc_rreg; radeon_register_accessor_init()
114 rdev->mc_wreg = &rs400_mc_wreg; radeon_register_accessor_init()
116 if (rdev->family == CHIP_RS690 || rdev->family == CHIP_RS740) { radeon_register_accessor_init()
117 rdev->mc_rreg = &rs690_mc_rreg; radeon_register_accessor_init()
118 rdev->mc_wreg = &rs690_mc_wreg; radeon_register_accessor_init()
120 if (rdev->family == CHIP_RS600) { radeon_register_accessor_init()
121 rdev->mc_rreg = &rs600_mc_rreg; radeon_register_accessor_init()
122 rdev->mc_wreg = &rs600_mc_wreg; radeon_register_accessor_init()
124 if (rdev->family == CHIP_RS780 || rdev->family == CHIP_RS880) { radeon_register_accessor_init()
125 rdev->mc_rreg = &rs780_mc_rreg; radeon_register_accessor_init()
126 rdev->mc_wreg = &rs780_mc_wreg; radeon_register_accessor_init()
129 if (rdev->family >= CHIP_BONAIRE) { radeon_register_accessor_init()
130 rdev->pciep_rreg = &cik_pciep_rreg; radeon_register_accessor_init()
131 rdev->pciep_wreg = &cik_pciep_wreg; radeon_register_accessor_init()
132 } else if (rdev->family >= CHIP_R600) { radeon_register_accessor_init()
133 rdev->pciep_rreg = &r600_pciep_rreg; radeon_register_accessor_init()
134 rdev->pciep_wreg = &r600_pciep_wreg; radeon_register_accessor_init()
138 static int radeon_invalid_get_allowed_info_register(struct radeon_device *rdev, radeon_invalid_get_allowed_info_register() argument
148 * @rdev: radeon device pointer
153 void radeon_agp_disable(struct radeon_device *rdev) radeon_agp_disable() argument
155 rdev->flags &= ~RADEON_IS_AGP; radeon_agp_disable()
156 if (rdev->family >= CHIP_R600) { radeon_agp_disable()
158 rdev->flags |= RADEON_IS_PCIE; radeon_agp_disable()
159 } else if (rdev->family >= CHIP_RV515 || radeon_agp_disable()
160 rdev->family == CHIP_RV380 || radeon_agp_disable()
161 rdev->family == CHIP_RV410 || radeon_agp_disable()
162 rdev->family == CHIP_R423) { radeon_agp_disable()
164 rdev->flags |= RADEON_IS_PCIE; radeon_agp_disable()
165 rdev->asic->gart.tlb_flush = &rv370_pcie_gart_tlb_flush; radeon_agp_disable()
166 rdev->asic->gart.get_page_entry = &rv370_pcie_gart_get_page_entry; radeon_agp_disable()
167 rdev->asic->gart.set_page = &rv370_pcie_gart_set_page; radeon_agp_disable()
170 rdev->flags |= RADEON_IS_PCI; radeon_agp_disable()
171 rdev->asic->gart.tlb_flush = &r100_pci_gart_tlb_flush; radeon_agp_disable()
172 rdev->asic->gart.get_page_entry = &r100_pci_gart_get_page_entry; radeon_agp_disable()
173 rdev->asic->gart.set_page = &r100_pci_gart_set_page; radeon_agp_disable()
175 rdev->mc.gtt_size = radeon_gart_size * 1024 * 1024; radeon_agp_disable()
2309 * @rdev: radeon device pointer
2316 int radeon_asic_init(struct radeon_device *rdev) radeon_asic_init() argument
2318 radeon_register_accessor_init(rdev); radeon_asic_init()
2321 if (rdev->flags & RADEON_SINGLE_CRTC) radeon_asic_init()
2322 rdev->num_crtc = 1; radeon_asic_init()
2324 rdev->num_crtc = 2; radeon_asic_init()
2326 rdev->has_uvd = false; radeon_asic_init()
2328 switch (rdev->family) { radeon_asic_init()
2334 rdev->asic = &r100_asic; radeon_asic_init()
2340 rdev->asic = &r200_asic; radeon_asic_init()
2346 if (rdev->flags & RADEON_IS_PCIE) radeon_asic_init()
2347 rdev->asic = &r300_asic_pcie; radeon_asic_init()
2349 rdev->asic = &r300_asic; radeon_asic_init()
2354 rdev->asic = &r420_asic; radeon_asic_init()
2356 if (rdev->bios == NULL) { radeon_asic_init()
2357 rdev->asic->pm.get_engine_clock = &radeon_legacy_get_engine_clock; radeon_asic_init()
2358 rdev->asic->pm.set_engine_clock = &radeon_legacy_set_engine_clock; radeon_asic_init()
2359 rdev->asic->pm.get_memory_clock = &radeon_legacy_get_memory_clock; radeon_asic_init()
2360 rdev->asic->pm.set_memory_clock = NULL; radeon_asic_init()
2361 rdev->asic->display.set_backlight_level = &radeon_legacy_set_backlight_level; radeon_asic_init()
2366 rdev->asic = &rs400_asic; radeon_asic_init()
2369 rdev->asic = &rs600_asic; radeon_asic_init()
2373 rdev->asic = &rs690_asic; radeon_asic_init()
2376 rdev->asic = &rv515_asic; radeon_asic_init()
2383 rdev->asic = &r520_asic; radeon_asic_init()
2386 rdev->asic = &r600_asic; radeon_asic_init()
2393 rdev->asic = &rv6xx_asic; radeon_asic_init()
2394 rdev->has_uvd = true; radeon_asic_init()
2398 rdev->asic = &rs780_asic; radeon_asic_init()
2400 if ((rdev->pdev->device == 0x9616)|| radeon_asic_init()
2401 (rdev->pdev->device == 0x9611)|| radeon_asic_init()
2402 (rdev->pdev->device == 0x9613)|| radeon_asic_init()
2403 (rdev->pdev->device == 0x9711)|| radeon_asic_init()
2404 (rdev->pdev->device == 0x9713)) radeon_asic_init()
2405 rdev->has_uvd = false; radeon_asic_init()
2407 rdev->has_uvd = true; radeon_asic_init()
2413 rdev->asic = &rv770_asic; radeon_asic_init()
2414 rdev->has_uvd = true; radeon_asic_init()
2422 if (rdev->family == CHIP_CEDAR) radeon_asic_init()
2423 rdev->num_crtc = 4; radeon_asic_init()
2425 rdev->num_crtc = 6; radeon_asic_init()
2426 rdev->asic = &evergreen_asic; radeon_asic_init()
2427 rdev->has_uvd = true; radeon_asic_init()
2432 rdev->asic = &sumo_asic; radeon_asic_init()
2433 rdev->has_uvd = true; radeon_asic_init()
2439 if (rdev->family == CHIP_CAICOS) radeon_asic_init()
2440 rdev->num_crtc = 4; radeon_asic_init()
2442 rdev->num_crtc = 6; radeon_asic_init()
2443 rdev->asic = &btc_asic; radeon_asic_init()
2444 rdev->has_uvd = true; radeon_asic_init()
2447 rdev->asic = &cayman_asic; radeon_asic_init()
2449 rdev->num_crtc = 6; radeon_asic_init()
2450 rdev->has_uvd = true; radeon_asic_init()
2453 rdev->asic = &trinity_asic; radeon_asic_init()
2455 rdev->num_crtc = 4; radeon_asic_init()
2456 rdev->has_uvd = true; radeon_asic_init()
2457 rdev->cg_flags = radeon_asic_init()
2465 rdev->asic = &si_asic; radeon_asic_init()
2467 if (rdev->family == CHIP_HAINAN) radeon_asic_init()
2468 rdev->num_crtc = 0; radeon_asic_init()
2469 else if (rdev->family == CHIP_OLAND) radeon_asic_init()
2470 rdev->num_crtc = 2; radeon_asic_init()
2472 rdev->num_crtc = 6; radeon_asic_init()
2473 if (rdev->family == CHIP_HAINAN) radeon_asic_init()
2474 rdev->has_uvd = false; radeon_asic_init()
2476 rdev->has_uvd = true; radeon_asic_init()
2477 switch (rdev->family) { radeon_asic_init()
2479 rdev->cg_flags = radeon_asic_init()
2493 rdev->pg_flags = 0; radeon_asic_init()
2496 rdev->cg_flags = radeon_asic_init()
2512 rdev->pg_flags = 0; radeon_asic_init()
2515 rdev->cg_flags = radeon_asic_init()
2531 rdev->pg_flags = 0 | radeon_asic_init()
2536 rdev->cg_flags = radeon_asic_init()
2551 rdev->pg_flags = 0; radeon_asic_init()
2554 rdev->cg_flags = radeon_asic_init()
2568 rdev->pg_flags = 0; radeon_asic_init()
2571 rdev->cg_flags = 0; radeon_asic_init()
2572 rdev->pg_flags = 0; radeon_asic_init()
2578 rdev->asic = &ci_asic; radeon_asic_init()
2579 rdev->num_crtc = 6; radeon_asic_init()
2580 rdev->has_uvd = true; radeon_asic_init()
2581 if (rdev->family == CHIP_BONAIRE) { radeon_asic_init()
2582 rdev->cg_flags = radeon_asic_init()
2599 rdev->pg_flags = 0; radeon_asic_init()
2601 rdev->cg_flags = radeon_asic_init()
2617 rdev->pg_flags = 0; radeon_asic_init()
2623 rdev->asic = &kv_asic; radeon_asic_init()
2625 if (rdev->family == CHIP_KAVERI) { radeon_asic_init()
2626 rdev->num_crtc = 4; radeon_asic_init()
2627 rdev->cg_flags = radeon_asic_init()
2642 rdev->pg_flags = 0; radeon_asic_init()
2654 rdev->num_crtc = 2; radeon_asic_init()
2655 rdev->cg_flags = radeon_asic_init()
2670 rdev->pg_flags = 0; radeon_asic_init()
2680 rdev->has_uvd = true; radeon_asic_init()
2687 if (rdev->flags & RADEON_IS_IGP) { radeon_asic_init()
2688 rdev->asic->pm.get_memory_clock = NULL; radeon_asic_init()
2689 rdev->asic->pm.set_memory_clock = NULL; radeon_asic_init()
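radeon_register_accessor_init() first points every accessor at radeon_invalid_rreg()/radeon_invalid_wreg() and then overrides the ones each family supports, so a stray access warns instead of crashing. A compact sketch of that default-then-override dispatch; the family numbers and hooks are invented:

#include <stdio.h>
#include <stdint.h>

typedef uint32_t (*rreg_t)(uint32_t reg);

static uint32_t invalid_rreg(uint32_t reg)
{
	fprintf(stderr, "invalid read of register 0x%x\n", reg);
	return 0;
}

static uint32_t fam_rreg(uint32_t reg) { return reg ^ 0x1; }  /* fake */

struct dev { int family; rreg_t mc_rreg; };

static void accessor_init(struct dev *d)
{
	d->mc_rreg = invalid_rreg;        /* safe default for every family */
	if (d->family >= 2)
		d->mc_rreg = fam_rreg;    /* newer chips get a real accessor */
}

int main(void)
{
	struct dev old_dev = { .family = 1 }, new_dev = { .family = 3 };

	accessor_init(&old_dev);
	accessor_init(&new_dev);
	old_dev.mc_rreg(0x10);            /* warns: no accessor on this family */
	printf("0x%x\n", new_dev.mc_rreg(0x10));
	return 0;
}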
H A Dvce_v1_0.c54 * @rdev: radeon_device pointer
59 uint32_t vce_v1_0_get_rptr(struct radeon_device *rdev, vce_v1_0_get_rptr() argument
71 * @rdev: radeon_device pointer
76 uint32_t vce_v1_0_get_wptr(struct radeon_device *rdev, vce_v1_0_get_wptr() argument
88 * @rdev: radeon_device pointer
93 void vce_v1_0_set_wptr(struct radeon_device *rdev, vce_v1_0_set_wptr() argument
102 void vce_v1_0_enable_mgcg(struct radeon_device *rdev, bool enable) vce_v1_0_enable_mgcg() argument
106 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_VCE_MGCG)) { vce_v1_0_enable_mgcg()
135 static void vce_v1_0_init_cg(struct radeon_device *rdev) vce_v1_0_init_cg() argument
157 int vce_v1_0_load_fw(struct radeon_device *rdev, uint32_t *data) vce_v1_0_load_fw() argument
159 struct vce_v1_0_fw_signature *sign = (void*)rdev->vce_fw->data; vce_v1_0_load_fw()
163 switch (rdev->family) { vce_v1_0_load_fw()
197 memcpy(&data[16], &sign[1], rdev->vce_fw->size - sizeof(*sign)); vce_v1_0_load_fw()
205 rdev->vce.keyselect = le32_to_cpu(sign->val[i].keyselect); vce_v1_0_load_fw()
210 unsigned vce_v1_0_bo_size(struct radeon_device *rdev) vce_v1_0_bo_size() argument
212 WARN_ON(VCE_V1_0_FW_SIZE < rdev->vce_fw->size); vce_v1_0_bo_size()
216 int vce_v1_0_resume(struct radeon_device *rdev) vce_v1_0_resume() argument
218 uint64_t addr = rdev->vce.gpu_addr; vce_v1_0_resume()
254 WREG32(VCE_LMI_FW_START_KEYSEL, rdev->vce.keyselect); vce_v1_0_resume()
277 vce_v1_0_init_cg(rdev); vce_v1_0_resume()
285 * @rdev: radeon_device pointer
289 int vce_v1_0_start(struct radeon_device *rdev) vce_v1_0_start() argument
297 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX]; vce_v1_0_start()
304 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX]; vce_v1_0_start()
356 int vce_v1_0_init(struct radeon_device *rdev) vce_v1_0_init() argument
361 r = vce_v1_0_start(rdev); vce_v1_0_init()
365 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX]; vce_v1_0_init()
367 r = radeon_ring_test(rdev, TN_RING_TYPE_VCE1_INDEX, ring); vce_v1_0_init()
373 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX]; vce_v1_0_init()
375 r = radeon_ring_test(rdev, TN_RING_TYPE_VCE2_INDEX, ring); vce_v1_0_init()
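The vce_v1_0_get_rptr()/vce_v1_0_get_wptr()/vce_v1_0_set_wptr() trio exposes the usual producer/consumer ring contract: the driver advances wptr, the engine advances rptr, both masked onto a power-of-two ring. A plain-memory model of that contract:

#include <stdio.h>

#define RING_SIZE 8                      /* must be a power of two */
static unsigned int ring[RING_SIZE];
static unsigned int rptr, wptr;          /* free-running indices */

static void ring_write(unsigned int v)
{
	ring[wptr++ & (RING_SIZE - 1)] = v;  /* producer side (set_wptr) */
}

static int ring_read(unsigned int *v)
{
	if (rptr == wptr)
		return -1;                   /* empty: engine caught up */
	*v = ring[rptr++ & (RING_SIZE - 1)]; /* consumer side (get_rptr) */
	return 0;
}

int main(void)
{
	unsigned int v, i;

	for (i = 0; i < 3; i++)
		ring_write(0x100 + i);
	while (!ring_read(&v))
		printf("consumed 0x%x\n", v);
	return 0;
}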
H A Dradeon_agp.c127 int radeon_agp_init(struct radeon_device *rdev) radeon_agp_init() argument
139 ret = drm_agp_acquire(rdev->ddev); radeon_agp_init()
145 ret = drm_agp_info(rdev->ddev, &info); radeon_agp_init()
147 drm_agp_release(rdev->ddev); radeon_agp_init()
152 if (rdev->ddev->agp->agp_info.aper_size < 32) { radeon_agp_init()
153 drm_agp_release(rdev->ddev); radeon_agp_init()
154 dev_warn(rdev->dev, "AGP aperture too small (%zuM) " radeon_agp_init()
156 rdev->ddev->agp->agp_info.aper_size); radeon_agp_init()
164 if (rdev->family <= CHIP_RV350) radeon_agp_init()
186 rdev->pdev->vendor == p->chip_vendor && radeon_agp_init()
187 rdev->pdev->device == p->chip_device && radeon_agp_init()
188 rdev->pdev->subsystem_vendor == p->subsys_vendor && radeon_agp_init()
189 rdev->pdev->subsystem_device == p->subsys_device) { radeon_agp_init()
237 ret = drm_agp_enable(rdev->ddev, mode); radeon_agp_init()
240 drm_agp_release(rdev->ddev); radeon_agp_init()
244 rdev->mc.agp_base = rdev->ddev->agp->agp_info.aper_base; radeon_agp_init()
245 rdev->mc.gtt_size = rdev->ddev->agp->agp_info.aper_size << 20; radeon_agp_init()
246 rdev->mc.gtt_start = rdev->mc.agp_base; radeon_agp_init()
247 rdev->mc.gtt_end = rdev->mc.gtt_start + rdev->mc.gtt_size - 1; radeon_agp_init()
248 dev_info(rdev->dev, "GTT: %lluM 0x%08llX - 0x%08llX\n", radeon_agp_init()
249 rdev->mc.gtt_size >> 20, rdev->mc.gtt_start, rdev->mc.gtt_end); radeon_agp_init()
252 if (rdev->family < CHIP_R200) { radeon_agp_init()
261 void radeon_agp_resume(struct radeon_device *rdev) radeon_agp_resume() argument
265 if (rdev->flags & RADEON_IS_AGP) { radeon_agp_resume()
266 r = radeon_agp_init(rdev); radeon_agp_resume()
268 dev_warn(rdev->dev, "radeon AGP reinit failed\n"); radeon_agp_resume()
273 void radeon_agp_fini(struct radeon_device *rdev) radeon_agp_fini() argument
276 if (rdev->ddev->agp && rdev->ddev->agp->acquired) { radeon_agp_fini()
277 drm_agp_release(rdev->ddev); radeon_agp_fini()
282 void radeon_agp_suspend(struct radeon_device *rdev) radeon_agp_suspend() argument
284 radeon_agp_fini(rdev); radeon_agp_suspend()
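radeon_agp_init() walks a quirk table keyed on PCI vendor/device/subsystem IDs and caps the negotiated AGP mode when an entry matches. A sketch of that matching, with a fabricated table entry:

#include <stdio.h>
#include <stdint.h>

struct agp_quirk {
	uint16_t vendor, device, sub_vendor, sub_device;
	int max_mode;                     /* cap, e.g. 4 means AGP 4x */
};

static const struct agp_quirk quirks[] = {
	{ 0x1002, 0x4150, 0x1043, 0x0123, 4 },   /* fabricated example */
	{ 0 }                                    /* table terminator */
};

static int agp_mode_for(uint16_t ven, uint16_t dev,
			uint16_t sven, uint16_t sdev, int wanted)
{
	const struct agp_quirk *q;

	for (q = quirks; q->vendor; q++)
		if (q->vendor == ven && q->device == dev &&
		    q->sub_vendor == sven && q->sub_device == sdev &&
		    wanted > q->max_mode)
			return q->max_mode;  /* quirk wins over request */
	return wanted;
}

int main(void)
{
	printf("mode = %dx\n",
	       agp_mode_for(0x1002, 0x4150, 0x1043, 0x0123, 8));
	return 0;
}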
H A Dni_dma.c30 u32 cayman_gpu_check_soft_reset(struct radeon_device *rdev);
48 * @rdev: radeon_device pointer
53 uint32_t cayman_dma_get_rptr(struct radeon_device *rdev, cayman_dma_get_rptr() argument
58 if (rdev->wb.enabled) { cayman_dma_get_rptr()
59 rptr = rdev->wb.wb[ring->rptr_offs/4]; cayman_dma_get_rptr()
75 * @rdev: radeon_device pointer
80 uint32_t cayman_dma_get_wptr(struct radeon_device *rdev, cayman_dma_get_wptr() argument
96 * @rdev: radeon_device pointer
101 void cayman_dma_set_wptr(struct radeon_device *rdev, cayman_dma_set_wptr() argument
117 * @rdev: radeon_device pointer
122 void cayman_dma_ring_ib_execute(struct radeon_device *rdev, cayman_dma_ring_ib_execute() argument
125 struct radeon_ring *ring = &rdev->ring[ib->ring]; cayman_dma_ring_ib_execute()
128 if (rdev->wb.enabled) { cayman_dma_ring_ib_execute()
153 * @rdev: radeon_device pointer
157 void cayman_dma_stop(struct radeon_device *rdev) cayman_dma_stop() argument
161 if ((rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX) || cayman_dma_stop()
162 (rdev->asic->copy.copy_ring_index == CAYMAN_RING_TYPE_DMA1_INDEX)) cayman_dma_stop()
163 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size); cayman_dma_stop()
175 rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false; cayman_dma_stop()
176 rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX].ready = false; cayman_dma_stop()
182 * @rdev: radeon_device pointer
187 int cayman_dma_resume(struct radeon_device *rdev) cayman_dma_resume() argument
197 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; cayman_dma_resume()
201 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; cayman_dma_resume()
223 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF); cayman_dma_resume()
225 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); cayman_dma_resume()
227 if (rdev->wb.enabled) cayman_dma_resume()
250 r = radeon_ring_test(rdev, ring->idx, ring); cayman_dma_resume()
257 if ((rdev->asic->copy.copy_ring_index == R600_RING_TYPE_DMA_INDEX) || cayman_dma_resume()
258 (rdev->asic->copy.copy_ring_index == CAYMAN_RING_TYPE_DMA1_INDEX)) cayman_dma_resume()
259 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size); cayman_dma_resume()
267 * @rdev: radeon_device pointer
271 void cayman_dma_fini(struct radeon_device *rdev) cayman_dma_fini() argument
273 cayman_dma_stop(rdev); cayman_dma_fini()
274 radeon_ring_fini(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX]); cayman_dma_fini()
275 radeon_ring_fini(rdev, &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]); cayman_dma_fini()
281 * @rdev: radeon_device pointer
287 bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) cayman_dma_is_lockup() argument
289 u32 reset_mask = cayman_gpu_check_soft_reset(rdev); cayman_dma_is_lockup()
298 radeon_ring_lockup_update(rdev, ring); cayman_dma_is_lockup()
301 return radeon_ring_test_lockup(rdev, ring); cayman_dma_is_lockup()
307 * @rdev: radeon_device pointer
315 void cayman_dma_vm_copy_pages(struct radeon_device *rdev, cayman_dma_vm_copy_pages() argument
343 * @rdev: radeon_device pointer
353 void cayman_dma_vm_write_pages(struct radeon_device *rdev, cayman_dma_vm_write_pages() argument
374 value = radeon_vm_map_gart(rdev, addr); cayman_dma_vm_write_pages()
391 * @rdev: radeon_device pointer
401 void cayman_dma_vm_set_pages(struct radeon_device *rdev, cayman_dma_vm_set_pages() argument
449 void cayman_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring, cayman_dma_vm_flush() argument
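cayman_dma_is_lockup() (like cik_sdma_is_lockup() earlier) refreshes a "last seen alive" record whenever the engine makes progress and only reports a lockup once the read pointer has been stalled past a deadline. A simulated version of that heuristic:

#include <stdio.h>
#include <stdbool.h>

static unsigned int last_rptr, last_jiffies;

static bool is_lockup(unsigned int reset_mask, unsigned int rptr,
		      unsigned int now, unsigned int timeout)
{
	if (!reset_mask || rptr != last_rptr) {
		last_rptr = rptr;         /* engine made progress */
		last_jiffies = now;
		return false;
	}
	return now - last_jiffies > timeout;  /* stalled for too long */
}

int main(void)
{
	printf("%d\n", is_lockup(0, 10, 0, 100));   /* alive */
	printf("%d\n", is_lockup(1, 10, 50, 100));  /* suspicious, not yet */
	printf("%d\n", is_lockup(1, 10, 200, 100)); /* locked up */
	return 0;
}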
H A Dradeon_atombios.c48 static void radeon_lookup_i2c_gpio_quirks(struct radeon_device *rdev, radeon_lookup_i2c_gpio_quirks() argument
53 if ((rdev->family == CHIP_R420) || radeon_lookup_i2c_gpio_quirks()
54 (rdev->family == CHIP_R423) || radeon_lookup_i2c_gpio_quirks()
55 (rdev->family == CHIP_RV410)) { radeon_lookup_i2c_gpio_quirks()
65 if (ASIC_IS_DCE4(rdev)) { radeon_lookup_i2c_gpio_quirks()
78 if (ASIC_IS_DCE3(rdev)) { radeon_lookup_i2c_gpio_quirks()
129 static struct radeon_i2c_bus_rec radeon_lookup_i2c_gpio(struct radeon_device *rdev, radeon_lookup_i2c_gpio() argument
132 struct atom_context *ctx = rdev->mode_info.atom_context; radeon_lookup_i2c_gpio()
152 radeon_lookup_i2c_gpio_quirks(rdev, gpio, i); radeon_lookup_i2c_gpio()
166 void radeon_atombios_i2c_init(struct radeon_device *rdev) radeon_atombios_i2c_init() argument
168 struct atom_context *ctx = rdev->mode_info.atom_context; radeon_atombios_i2c_init()
185 radeon_lookup_i2c_gpio_quirks(rdev, gpio, i); radeon_atombios_i2c_init()
191 rdev->i2c_bus[i] = radeon_i2c_create(rdev->ddev, &i2c, stmp); radeon_atombios_i2c_init()
199 struct radeon_gpio_rec radeon_atombios_lookup_gpio(struct radeon_device *rdev, radeon_atombios_lookup_gpio() argument
202 struct atom_context *ctx = rdev->mode_info.atom_context; radeon_atombios_lookup_gpio()
237 static struct radeon_hpd radeon_atom_get_hpd_info_from_gpio(struct radeon_device *rdev, radeon_atom_get_hpd_info_from_gpio() argument
245 if (ASIC_IS_DCE6(rdev)) radeon_atom_get_hpd_info_from_gpio()
247 else if (ASIC_IS_DCE4(rdev)) radeon_atom_get_hpd_info_from_gpio()
435 struct radeon_device *rdev = dev->dev_private; radeon_atom_apply_quirks() local
436 *i2c_bus = radeon_lookup_i2c_gpio(rdev, 0x93); radeon_atom_apply_quirks()
519 struct radeon_device *rdev = dev->dev_private; radeon_get_atom_connector_info_from_object_table() local
520 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_get_atom_connector_info_from_object_table()
586 if ((rdev->flags & RADEON_IS_IGP) && radeon_get_atom_connector_info_from_object_table()
729 radeon_lookup_i2c_gpio(rdev, radeon_get_atom_connector_info_from_object_table()
791 ddc_bus = radeon_lookup_i2c_gpio(rdev, radeon_get_atom_connector_info_from_object_table()
799 gpio = radeon_atombios_lookup_gpio(rdev, radeon_get_atom_connector_info_from_object_table()
801 hpd = radeon_atom_get_hpd_info_from_gpio(rdev, &gpio); radeon_get_atom_connector_info_from_object_table()
850 struct radeon_device *rdev = dev->dev_private; atombios_get_connector_object_id() local
852 if (rdev->flags & RADEON_IS_IGP) { atombios_get_connector_object_id()
858 struct radeon_mode_info *mode_info = &rdev->mode_info; atombios_get_connector_object_id()
901 struct radeon_device *rdev = dev->dev_private; radeon_get_atom_connector_info_from_supported_devices_table() local
902 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_get_atom_connector_info_from_supported_devices_table()
979 radeon_lookup_i2c_gpio(rdev, radeon_get_atom_connector_info_from_supported_devices_table()
1021 if (ASIC_IS_AVIVO(rdev) || radeon_r4xx_atom) radeon_get_atom_connector_info_from_supported_devices_table()
1117 static void radeon_atombios_get_dentist_vco_freq(struct radeon_device *rdev) radeon_atombios_get_dentist_vco_freq() argument
1119 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atombios_get_dentist_vco_freq()
1129 rdev->clock.vco_freq = radeon_atombios_get_dentist_vco_freq()
1136 struct radeon_device *rdev = dev->dev_private; radeon_atom_get_clock_info() local
1137 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atom_get_clock_info()
1141 struct radeon_pll *p1pll = &rdev->clock.p1pll; radeon_atom_get_clock_info()
1142 struct radeon_pll *p2pll = &rdev->clock.p2pll; radeon_atom_get_clock_info()
1143 struct radeon_pll *dcpll = &rdev->clock.dcpll; radeon_atom_get_clock_info()
1144 struct radeon_pll *spll = &rdev->clock.spll; radeon_atom_get_clock_info()
1145 struct radeon_pll *mpll = &rdev->clock.mpll; radeon_atom_get_clock_info()
1182 if (ASIC_IS_AVIVO(rdev)) radeon_atom_get_clock_info()
1196 if (ASIC_IS_DCE4(rdev)) radeon_atom_get_clock_info()
1211 if (ASIC_IS_AVIVO(rdev)) radeon_atom_get_clock_info()
1223 if (ASIC_IS_DCE4(rdev)) radeon_atom_get_clock_info()
1238 if (ASIC_IS_AVIVO(rdev)) radeon_atom_get_clock_info()
1249 rdev->clock.default_sclk = radeon_atom_get_clock_info()
1251 rdev->clock.default_mclk = radeon_atom_get_clock_info()
1254 if (ASIC_IS_DCE4(rdev)) { radeon_atom_get_clock_info()
1255 rdev->clock.default_dispclk = radeon_atom_get_clock_info()
1257 if (rdev->clock.default_dispclk == 0) { radeon_atom_get_clock_info()
1258 if (ASIC_IS_DCE6(rdev)) radeon_atom_get_clock_info()
1259 rdev->clock.default_dispclk = 60000; /* 600 MHz */ radeon_atom_get_clock_info()
1260 else if (ASIC_IS_DCE5(rdev)) radeon_atom_get_clock_info()
1261 rdev->clock.default_dispclk = 54000; /* 540 MHz */ radeon_atom_get_clock_info()
1263 rdev->clock.default_dispclk = 60000; /* 600 MHz */ radeon_atom_get_clock_info()
1266 if (ASIC_IS_DCE6(rdev) && (rdev->clock.default_dispclk < 53900)) { radeon_atom_get_clock_info()
1268 rdev->clock.default_dispclk / 100); radeon_atom_get_clock_info()
1269 rdev->clock.default_dispclk = 60000; radeon_atom_get_clock_info()
1271 rdev->clock.dp_extclk = radeon_atom_get_clock_info()
1273 rdev->clock.current_dispclk = rdev->clock.default_dispclk; radeon_atom_get_clock_info()
1277 rdev->clock.max_pixel_clock = le16_to_cpu(firmware_info->info.usMaxPixelClock); radeon_atom_get_clock_info()
1278 if (rdev->clock.max_pixel_clock == 0) radeon_atom_get_clock_info()
1279 rdev->clock.max_pixel_clock = 40000; radeon_atom_get_clock_info()
1282 rdev->mode_info.firmware_flags = radeon_atom_get_clock_info()
1285 if (ASIC_IS_DCE8(rdev)) radeon_atom_get_clock_info()
1286 rdev->clock.vco_freq = radeon_atom_get_clock_info()
1288 else if (ASIC_IS_DCE5(rdev)) radeon_atom_get_clock_info()
1289 rdev->clock.vco_freq = rdev->clock.current_dispclk; radeon_atom_get_clock_info()
1290 else if (ASIC_IS_DCE41(rdev)) radeon_atom_get_clock_info()
1291 radeon_atombios_get_dentist_vco_freq(rdev); radeon_atom_get_clock_info()
1293 rdev->clock.vco_freq = rdev->clock.current_dispclk; radeon_atom_get_clock_info()
1295 if (rdev->clock.vco_freq == 0) radeon_atom_get_clock_info()
1296 rdev->clock.vco_freq = 360000; /* 3.6 GHz */ radeon_atom_get_clock_info()
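radeon_atom_get_clock_info() also sanity-checks the firmware display clock; clock values here are in 10 kHz units, so 60000 means 600 MHz. Condensed, the defaulting policy visible in the hits above reads:

	/* Condensed restatement of the dispclk fallbacks above. */
	if (rdev->clock.default_dispclk == 0)
		rdev->clock.default_dispclk =
			(ASIC_IS_DCE5(rdev) && !ASIC_IS_DCE6(rdev)) ?
				54000 : 60000;	/* 540 MHz : 600 MHz */
	/* DCE6 additionally rejects anything below roughly 539 MHz */
	if (ASIC_IS_DCE6(rdev) && rdev->clock.default_dispclk < 53900)
		rdev->clock.default_dispclk = 60000;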
1304 bool radeon_atombios_sideport_present(struct radeon_device *rdev) radeon_atombios_sideport_present() argument
1306 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atombios_sideport_present()
1313 if (rdev->family == CHIP_RS600) radeon_atombios_sideport_present()
1341 struct radeon_device *rdev = dev->dev_private; radeon_atombios_get_tmds_info() local
1342 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atombios_get_tmds_info()
1386 bool radeon_atombios_get_ppll_ss_info(struct radeon_device *rdev, radeon_atombios_get_ppll_ss_info() argument
1390 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atombios_get_ppll_ss_info()
1426 static void radeon_atombios_get_igp_ss_overrides(struct radeon_device *rdev, radeon_atombios_get_igp_ss_overrides() argument
1430 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atombios_get_igp_ss_overrides()
1514 bool radeon_atombios_get_asic_ss_info(struct radeon_device *rdev, radeon_atombios_get_asic_ss_info() argument
1518 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atombios_get_asic_ss_info()
1527 if (!(rdev->mode_info.firmware_flags & ATOM_BIOS_INFO_MEMORY_CLOCK_SS_SUPPORT)) radeon_atombios_get_asic_ss_info()
1531 if (!(rdev->mode_info.firmware_flags & ATOM_BIOS_INFO_ENGINE_CLOCK_SS_SUPPORT)) radeon_atombios_get_asic_ss_info()
1603 if (rdev->flags & RADEON_IS_IGP) radeon_atombios_get_asic_ss_info()
1604 radeon_atombios_get_igp_ss_overrides(rdev, ss, id); radeon_atombios_get_asic_ss_info()
1630 struct radeon_device *rdev = dev->dev_private; radeon_atombios_get_lvds_info() local
1631 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atombios_get_lvds_info()
1737 rdev->mode_info.bios_hardcoded_edid = edid; radeon_atombios_get_lvds_info()
1738 rdev->mode_info.bios_hardcoded_edid_size = edid_size; radeon_atombios_get_lvds_info()
1770 struct radeon_device *rdev = dev->dev_private; radeon_atombios_get_primary_dac_info() local
1771 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atombios_get_primary_dac_info()
1797 bool radeon_atom_get_tv_timings(struct radeon_device *rdev, int index, radeon_atom_get_tv_timings() argument
1800 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atom_get_tv_timings()
1895 radeon_atombios_get_tv_info(struct radeon_device *rdev) radeon_atombios_get_tv_info() argument
1897 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atombios_get_tv_info()
1956 struct radeon_device *rdev = dev->dev_private; radeon_atombios_get_tv_dac_info() local
1957 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atombios_get_tv_dac_info()
1988 tv_dac->tv_std = radeon_atombios_get_tv_info(rdev); radeon_atombios_get_tv_dac_info()
2049 static void radeon_atombios_parse_misc_flags_1_3(struct radeon_device *rdev, radeon_atombios_parse_misc_flags_1_3() argument
2053 rdev->pm.power_state[state_index].misc = misc; radeon_atombios_parse_misc_flags_1_3()
2054 rdev->pm.power_state[state_index].misc2 = misc2; radeon_atombios_parse_misc_flags_1_3()
2057 rdev->pm.power_state[state_index].type = radeon_atombios_parse_misc_flags_1_3()
2060 rdev->pm.power_state[state_index].type = radeon_atombios_parse_misc_flags_1_3()
2063 rdev->pm.power_state[state_index].type = radeon_atombios_parse_misc_flags_1_3()
2066 rdev->pm.power_state[state_index].type = radeon_atombios_parse_misc_flags_1_3()
2069 rdev->pm.power_state[state_index].type = radeon_atombios_parse_misc_flags_1_3()
2071 rdev->pm.power_state[state_index].flags &= radeon_atombios_parse_misc_flags_1_3()
2075 rdev->pm.power_state[state_index].type = radeon_atombios_parse_misc_flags_1_3()
2078 rdev->pm.power_state[state_index].type = radeon_atombios_parse_misc_flags_1_3()
2080 rdev->pm.default_power_state_index = state_index; radeon_atombios_parse_misc_flags_1_3()
2081 rdev->pm.power_state[state_index].default_clock_mode = radeon_atombios_parse_misc_flags_1_3()
2082 &rdev->pm.power_state[state_index].clock_info[0]; radeon_atombios_parse_misc_flags_1_3()
2084 rdev->pm.power_state[state_index].clock_info[0].flags |= radeon_atombios_parse_misc_flags_1_3()
2089 static int radeon_atombios_parse_power_table_1_3(struct radeon_device *rdev) radeon_atombios_parse_power_table_1_3() argument
2091 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atombios_parse_power_table_1_3()
2112 i2c_bus = radeon_lookup_i2c_gpio(rdev, power_info->info.ucOverdriveI2cLine); radeon_atombios_parse_power_table_1_3()
2113 rdev->pm.i2c_bus = radeon_i2c_lookup(rdev, &i2c_bus); radeon_atombios_parse_power_table_1_3()
2114 if (rdev->pm.i2c_bus) { radeon_atombios_parse_power_table_1_3()
2120 i2c_new_device(&rdev->pm.i2c_bus->adapter, &info); radeon_atombios_parse_power_table_1_3()
2128 rdev->pm.power_state = kzalloc(sizeof(struct radeon_power_state) * num_modes, GFP_KERNEL); radeon_atombios_parse_power_table_1_3()
2129 if (!rdev->pm.power_state) radeon_atombios_parse_power_table_1_3()
2133 rdev->pm.power_state[state_index].clock_info = radeon_atombios_parse_power_table_1_3()
2135 if (!rdev->pm.power_state[state_index].clock_info) radeon_atombios_parse_power_table_1_3()
2137 rdev->pm.power_state[state_index].num_clock_modes = 1; radeon_atombios_parse_power_table_1_3()
2138 rdev->pm.power_state[state_index].clock_info[0].voltage.type = VOLTAGE_NONE; radeon_atombios_parse_power_table_1_3()
2141 rdev->pm.power_state[state_index].clock_info[0].mclk = radeon_atombios_parse_power_table_1_3()
2143 rdev->pm.power_state[state_index].clock_info[0].sclk = radeon_atombios_parse_power_table_1_3()
2146 if ((rdev->pm.power_state[state_index].clock_info[0].mclk == 0) || radeon_atombios_parse_power_table_1_3()
2147 (rdev->pm.power_state[state_index].clock_info[0].sclk == 0)) radeon_atombios_parse_power_table_1_3()
2149 rdev->pm.power_state[state_index].pcie_lanes = radeon_atombios_parse_power_table_1_3()
2154 rdev->pm.power_state[state_index].clock_info[0].voltage.type = radeon_atombios_parse_power_table_1_3()
2156 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio = radeon_atombios_parse_power_table_1_3()
2157 radeon_atombios_lookup_gpio(rdev, radeon_atombios_parse_power_table_1_3()
2160 rdev->pm.power_state[state_index].clock_info[0].voltage.active_high = radeon_atombios_parse_power_table_1_3()
2163 rdev->pm.power_state[state_index].clock_info[0].voltage.active_high = radeon_atombios_parse_power_table_1_3()
2166 rdev->pm.power_state[state_index].clock_info[0].voltage.type = radeon_atombios_parse_power_table_1_3()
2168 rdev->pm.power_state[state_index].clock_info[0].voltage.vddc_id = radeon_atombios_parse_power_table_1_3()
2171 rdev->pm.power_state[state_index].flags = RADEON_PM_STATE_SINGLE_DISPLAY_ONLY; radeon_atombios_parse_power_table_1_3()
2172 radeon_atombios_parse_misc_flags_1_3(rdev, state_index, misc, 0); radeon_atombios_parse_power_table_1_3()
2176 rdev->pm.power_state[state_index].clock_info[0].mclk = radeon_atombios_parse_power_table_1_3()
2178 rdev->pm.power_state[state_index].clock_info[0].sclk = radeon_atombios_parse_power_table_1_3()
2181 if ((rdev->pm.power_state[state_index].clock_info[0].mclk == 0) || radeon_atombios_parse_power_table_1_3()
2182 (rdev->pm.power_state[state_index].clock_info[0].sclk == 0)) radeon_atombios_parse_power_table_1_3()
2184 rdev->pm.power_state[state_index].pcie_lanes = radeon_atombios_parse_power_table_1_3()
2190 rdev->pm.power_state[state_index].clock_info[0].voltage.type = radeon_atombios_parse_power_table_1_3()
2192 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio = radeon_atombios_parse_power_table_1_3()
2193 radeon_atombios_lookup_gpio(rdev, radeon_atombios_parse_power_table_1_3()
2196 rdev->pm.power_state[state_index].clock_info[0].voltage.active_high = radeon_atombios_parse_power_table_1_3()
2199 rdev->pm.power_state[state_index].clock_info[0].voltage.active_high = radeon_atombios_parse_power_table_1_3()
2202 rdev->pm.power_state[state_index].clock_info[0].voltage.type = radeon_atombios_parse_power_table_1_3()
2204 rdev->pm.power_state[state_index].clock_info[0].voltage.vddc_id = radeon_atombios_parse_power_table_1_3()
2207 rdev->pm.power_state[state_index].flags = RADEON_PM_STATE_SINGLE_DISPLAY_ONLY; radeon_atombios_parse_power_table_1_3()
2208 radeon_atombios_parse_misc_flags_1_3(rdev, state_index, misc, misc2); radeon_atombios_parse_power_table_1_3()
2212 rdev->pm.power_state[state_index].clock_info[0].mclk = radeon_atombios_parse_power_table_1_3()
2214 rdev->pm.power_state[state_index].clock_info[0].sclk = radeon_atombios_parse_power_table_1_3()
2217 if ((rdev->pm.power_state[state_index].clock_info[0].mclk == 0) || radeon_atombios_parse_power_table_1_3()
2218 (rdev->pm.power_state[state_index].clock_info[0].sclk == 0)) radeon_atombios_parse_power_table_1_3()
2220 rdev->pm.power_state[state_index].pcie_lanes = radeon_atombios_parse_power_table_1_3()
2226 rdev->pm.power_state[state_index].clock_info[0].voltage.type = radeon_atombios_parse_power_table_1_3()
2228 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio = radeon_atombios_parse_power_table_1_3()
2229 radeon_atombios_lookup_gpio(rdev, radeon_atombios_parse_power_table_1_3()
2232 rdev->pm.power_state[state_index].clock_info[0].voltage.active_high = radeon_atombios_parse_power_table_1_3()
2235 rdev->pm.power_state[state_index].clock_info[0].voltage.active_high = radeon_atombios_parse_power_table_1_3()
2238 rdev->pm.power_state[state_index].clock_info[0].voltage.type = radeon_atombios_parse_power_table_1_3()
2240 rdev->pm.power_state[state_index].clock_info[0].voltage.vddc_id = radeon_atombios_parse_power_table_1_3()
2243 rdev->pm.power_state[state_index].clock_info[0].voltage.vddci_enabled = radeon_atombios_parse_power_table_1_3()
2245 rdev->pm.power_state[state_index].clock_info[0].voltage.vddci_id = radeon_atombios_parse_power_table_1_3()
2249 rdev->pm.power_state[state_index].flags = RADEON_PM_STATE_SINGLE_DISPLAY_ONLY; radeon_atombios_parse_power_table_1_3()
2250 radeon_atombios_parse_misc_flags_1_3(rdev, state_index, misc, misc2); radeon_atombios_parse_power_table_1_3()
2256 if (rdev->pm.default_power_state_index == -1) { radeon_atombios_parse_power_table_1_3()
2257 rdev->pm.power_state[state_index - 1].type = radeon_atombios_parse_power_table_1_3()
2259 rdev->pm.default_power_state_index = state_index - 1; radeon_atombios_parse_power_table_1_3()
2260 rdev->pm.power_state[state_index - 1].default_clock_mode = radeon_atombios_parse_power_table_1_3()
2261 &rdev->pm.power_state[state_index - 1].clock_info[0]; radeon_atombios_parse_power_table_1_3()
2262 rdev->pm.power_state[state_index].flags &= radeon_atombios_parse_power_table_1_3()
2264 rdev->pm.power_state[state_index].misc = 0; radeon_atombios_parse_power_table_1_3()
2265 rdev->pm.power_state[state_index].misc2 = 0; radeon_atombios_parse_power_table_1_3()
2270 static void radeon_atombios_add_pplib_thermal_controller(struct radeon_device *rdev, radeon_atombios_add_pplib_thermal_controller() argument
2278 rdev->pm.no_fan = true; radeon_atombios_add_pplib_thermal_controller()
2279 rdev->pm.fan_pulses_per_revolution = radeon_atombios_add_pplib_thermal_controller()
2281 if (rdev->pm.fan_pulses_per_revolution) { radeon_atombios_add_pplib_thermal_controller()
2282 rdev->pm.fan_min_rpm = controller->ucFanMinRPM; radeon_atombios_add_pplib_thermal_controller()
2283 rdev->pm.fan_max_rpm = controller->ucFanMaxRPM; radeon_atombios_add_pplib_thermal_controller()
2289 rdev->pm.int_thermal_type = THERMAL_TYPE_RV6XX; radeon_atombios_add_pplib_thermal_controller()
2294 rdev->pm.int_thermal_type = THERMAL_TYPE_RV770; radeon_atombios_add_pplib_thermal_controller()
2299 rdev->pm.int_thermal_type = THERMAL_TYPE_EVERGREEN; radeon_atombios_add_pplib_thermal_controller()
2304 rdev->pm.int_thermal_type = THERMAL_TYPE_SUMO; radeon_atombios_add_pplib_thermal_controller()
2309 rdev->pm.int_thermal_type = THERMAL_TYPE_NI; radeon_atombios_add_pplib_thermal_controller()
2314 rdev->pm.int_thermal_type = THERMAL_TYPE_SI; radeon_atombios_add_pplib_thermal_controller()
2319 rdev->pm.int_thermal_type = THERMAL_TYPE_CI; radeon_atombios_add_pplib_thermal_controller()
2324 rdev->pm.int_thermal_type = THERMAL_TYPE_KV; radeon_atombios_add_pplib_thermal_controller()
2330 rdev->pm.int_thermal_type = THERMAL_TYPE_EXTERNAL_GPIO; radeon_atombios_add_pplib_thermal_controller()
2336 rdev->pm.int_thermal_type = THERMAL_TYPE_ADT7473_WITH_INTERNAL; radeon_atombios_add_pplib_thermal_controller()
2342 rdev->pm.int_thermal_type = THERMAL_TYPE_EMC2103_WITH_INTERNAL; radeon_atombios_add_pplib_thermal_controller()
2349 rdev->pm.int_thermal_type = THERMAL_TYPE_EXTERNAL; radeon_atombios_add_pplib_thermal_controller()
2350 i2c_bus = radeon_lookup_i2c_gpio(rdev, controller->ucI2cLine); radeon_atombios_add_pplib_thermal_controller()
2351 rdev->pm.i2c_bus = radeon_i2c_lookup(rdev, &i2c_bus); radeon_atombios_add_pplib_thermal_controller()
2352 if (rdev->pm.i2c_bus) { radeon_atombios_add_pplib_thermal_controller()
2357 i2c_new_device(&rdev->pm.i2c_bus->adapter, &info); radeon_atombios_add_pplib_thermal_controller()
2369 void radeon_atombios_get_default_voltages(struct radeon_device *rdev, radeon_atombios_get_default_voltages() argument
2372 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atombios_get_default_voltages()
2395 static void radeon_atombios_parse_pplib_non_clock_info(struct radeon_device *rdev, radeon_atombios_parse_pplib_non_clock_info() argument
2404 radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd); radeon_atombios_parse_pplib_non_clock_info()
2406 rdev->pm.power_state[state_index].misc = misc; radeon_atombios_parse_pplib_non_clock_info()
2407 rdev->pm.power_state[state_index].misc2 = misc2; radeon_atombios_parse_pplib_non_clock_info()
2408 rdev->pm.power_state[state_index].pcie_lanes = radeon_atombios_parse_pplib_non_clock_info()
2413 rdev->pm.power_state[state_index].type = radeon_atombios_parse_pplib_non_clock_info()
2417 rdev->pm.power_state[state_index].type = radeon_atombios_parse_pplib_non_clock_info()
2421 rdev->pm.power_state[state_index].type = radeon_atombios_parse_pplib_non_clock_info()
2426 rdev->pm.power_state[state_index].type = radeon_atombios_parse_pplib_non_clock_info()
2430 rdev->pm.power_state[state_index].flags = 0; radeon_atombios_parse_pplib_non_clock_info()
2432 rdev->pm.power_state[state_index].flags |= radeon_atombios_parse_pplib_non_clock_info()
2435 rdev->pm.power_state[state_index].type = radeon_atombios_parse_pplib_non_clock_info()
2437 rdev->pm.default_power_state_index = state_index; radeon_atombios_parse_pplib_non_clock_info()
2438 rdev->pm.power_state[state_index].default_clock_mode = radeon_atombios_parse_pplib_non_clock_info()
2439 &rdev->pm.power_state[state_index].clock_info[mode_index - 1]; radeon_atombios_parse_pplib_non_clock_info()
2440 if ((rdev->family >= CHIP_BARTS) && !(rdev->flags & RADEON_IS_IGP)) { radeon_atombios_parse_pplib_non_clock_info()
2442 rdev->pm.default_sclk = rdev->pm.power_state[state_index].clock_info[0].sclk; radeon_atombios_parse_pplib_non_clock_info()
2443 rdev->pm.default_mclk = rdev->pm.power_state[state_index].clock_info[0].mclk; radeon_atombios_parse_pplib_non_clock_info()
2444 rdev->pm.default_vddc = rdev->pm.power_state[state_index].clock_info[0].voltage.voltage; radeon_atombios_parse_pplib_non_clock_info()
2445 rdev->pm.default_vddci = rdev->pm.power_state[state_index].clock_info[0].voltage.vddci; radeon_atombios_parse_pplib_non_clock_info()
2449 if (ASIC_IS_DCE4(rdev)) radeon_atombios_parse_pplib_non_clock_info()
2450 radeon_atom_get_max_voltage(rdev, radeon_atombios_parse_pplib_non_clock_info()
2455 rdev->pm.power_state[state_index].clock_info[j].mclk = radeon_atombios_parse_pplib_non_clock_info()
2456 rdev->clock.default_mclk; radeon_atombios_parse_pplib_non_clock_info()
2457 rdev->pm.power_state[state_index].clock_info[j].sclk = radeon_atombios_parse_pplib_non_clock_info()
2458 rdev->clock.default_sclk; radeon_atombios_parse_pplib_non_clock_info()
2460 rdev->pm.power_state[state_index].clock_info[j].voltage.voltage = radeon_atombios_parse_pplib_non_clock_info()
2463 rdev->pm.power_state[state_index].clock_info[j].voltage.vddci = radeon_atombios_parse_pplib_non_clock_info()
2470 static bool radeon_atombios_parse_pplib_clock_info(struct radeon_device *rdev, radeon_atombios_parse_pplib_clock_info() argument
2477 if (rdev->flags & RADEON_IS_IGP) { radeon_atombios_parse_pplib_clock_info()
2478 if (rdev->family >= CHIP_PALM) { radeon_atombios_parse_pplib_clock_info()
2481 rdev->pm.power_state[state_index].clock_info[mode_index].sclk = sclk; radeon_atombios_parse_pplib_clock_info()
2485 rdev->pm.power_state[state_index].clock_info[mode_index].sclk = sclk; radeon_atombios_parse_pplib_clock_info()
2487 } else if (rdev->family >= CHIP_BONAIRE) { radeon_atombios_parse_pplib_clock_info()
2492 rdev->pm.power_state[state_index].clock_info[mode_index].mclk = mclk; radeon_atombios_parse_pplib_clock_info()
2493 rdev->pm.power_state[state_index].clock_info[mode_index].sclk = sclk; radeon_atombios_parse_pplib_clock_info()
2494 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.type = radeon_atombios_parse_pplib_clock_info()
2496 } else if (rdev->family >= CHIP_TAHITI) { radeon_atombios_parse_pplib_clock_info()
2501 rdev->pm.power_state[state_index].clock_info[mode_index].mclk = mclk; radeon_atombios_parse_pplib_clock_info()
2502 rdev->pm.power_state[state_index].clock_info[mode_index].sclk = sclk; radeon_atombios_parse_pplib_clock_info()
2503 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.type = radeon_atombios_parse_pplib_clock_info()
2505 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.voltage = radeon_atombios_parse_pplib_clock_info()
2507 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.vddci = radeon_atombios_parse_pplib_clock_info()
2509 } else if (rdev->family >= CHIP_CEDAR) { radeon_atombios_parse_pplib_clock_info()
2514 rdev->pm.power_state[state_index].clock_info[mode_index].mclk = mclk; radeon_atombios_parse_pplib_clock_info()
2515 rdev->pm.power_state[state_index].clock_info[mode_index].sclk = sclk; radeon_atombios_parse_pplib_clock_info()
2516 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.type = radeon_atombios_parse_pplib_clock_info()
2518 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.voltage = radeon_atombios_parse_pplib_clock_info()
2520 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.vddci = radeon_atombios_parse_pplib_clock_info()
2527 rdev->pm.power_state[state_index].clock_info[mode_index].mclk = mclk; radeon_atombios_parse_pplib_clock_info()
2528 rdev->pm.power_state[state_index].clock_info[mode_index].sclk = sclk; radeon_atombios_parse_pplib_clock_info()
2529 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.type = radeon_atombios_parse_pplib_clock_info()
2531 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.voltage = radeon_atombios_parse_pplib_clock_info()
2536 switch (rdev->pm.power_state[state_index].clock_info[mode_index].voltage.voltage) { radeon_atombios_parse_pplib_clock_info()
2545 if (radeon_atom_get_max_vddc(rdev, VOLTAGE_TYPE_VDDC, radeon_atombios_parse_pplib_clock_info()
2546 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.voltage, radeon_atombios_parse_pplib_clock_info()
2548 rdev->pm.power_state[state_index].clock_info[mode_index].voltage.voltage = vddc; radeon_atombios_parse_pplib_clock_info()
2554 if (rdev->flags & RADEON_IS_IGP) { radeon_atombios_parse_pplib_clock_info()
2556 if (rdev->pm.power_state[state_index].clock_info[mode_index].sclk == 0) radeon_atombios_parse_pplib_clock_info()
2560 if ((rdev->pm.power_state[state_index].clock_info[mode_index].mclk == 0) || radeon_atombios_parse_pplib_clock_info()
2561 (rdev->pm.power_state[state_index].clock_info[mode_index].sclk == 0)) radeon_atombios_parse_pplib_clock_info()
2567 static int radeon_atombios_parse_power_table_4_5(struct radeon_device *rdev) radeon_atombios_parse_power_table_4_5() argument
2569 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atombios_parse_power_table_4_5()
2586 radeon_atombios_add_pplib_thermal_controller(rdev, &power_info->pplib.sThermalController); radeon_atombios_parse_power_table_4_5()
2589 rdev->pm.power_state = kzalloc(sizeof(struct radeon_power_state) * radeon_atombios_parse_power_table_4_5()
2591 if (!rdev->pm.power_state) radeon_atombios_parse_power_table_4_5()
2605 rdev->pm.power_state[i].clock_info = kzalloc(sizeof(struct radeon_pm_clock_info) * radeon_atombios_parse_power_table_4_5()
2609 if (!rdev->pm.power_state[i].clock_info) radeon_atombios_parse_power_table_4_5()
2618 valid = radeon_atombios_parse_pplib_clock_info(rdev, radeon_atombios_parse_power_table_4_5()
2625 rdev->pm.power_state[state_index].clock_info[0].mclk = radeon_atombios_parse_power_table_4_5()
2626 rdev->clock.default_mclk; radeon_atombios_parse_power_table_4_5()
2627 rdev->pm.power_state[state_index].clock_info[0].sclk = radeon_atombios_parse_power_table_4_5()
2628 rdev->clock.default_sclk; radeon_atombios_parse_power_table_4_5()
2631 rdev->pm.power_state[state_index].num_clock_modes = mode_index; radeon_atombios_parse_power_table_4_5()
2633 radeon_atombios_parse_pplib_non_clock_info(rdev, state_index, mode_index, radeon_atombios_parse_power_table_4_5()
2640 if (rdev->pm.power_state[i].num_clock_modes > 1) radeon_atombios_parse_power_table_4_5()
2641 rdev->pm.power_state[i].clock_info[0].flags |= radeon_atombios_parse_power_table_4_5()
2645 if (rdev->pm.default_power_state_index == -1) { radeon_atombios_parse_power_table_4_5()
2646 rdev->pm.power_state[0].type = radeon_atombios_parse_power_table_4_5()
2648 rdev->pm.default_power_state_index = 0; radeon_atombios_parse_power_table_4_5()
2649 rdev->pm.power_state[0].default_clock_mode = radeon_atombios_parse_power_table_4_5()
2650 &rdev->pm.power_state[0].clock_info[0]; radeon_atombios_parse_power_table_4_5()
2655 static int radeon_atombios_parse_power_table_6(struct radeon_device *rdev) radeon_atombios_parse_power_table_6() argument
2657 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atombios_parse_power_table_6()
2678 radeon_atombios_add_pplib_thermal_controller(rdev, &power_info->pplib.sThermalController); radeon_atombios_parse_power_table_6()
2690 rdev->pm.power_state = kzalloc(sizeof(struct radeon_power_state) * radeon_atombios_parse_power_table_6()
2692 if (!rdev->pm.power_state) radeon_atombios_parse_power_table_6()
2701 rdev->pm.power_state[i].clock_info = kzalloc(sizeof(struct radeon_pm_clock_info) * radeon_atombios_parse_power_table_6()
2705 if (!rdev->pm.power_state[i].clock_info) radeon_atombios_parse_power_table_6()
2712 valid = radeon_atombios_parse_pplib_clock_info(rdev, radeon_atombios_parse_power_table_6()
2719 rdev->pm.power_state[state_index].clock_info[0].mclk = radeon_atombios_parse_power_table_6()
2720 rdev->clock.default_mclk; radeon_atombios_parse_power_table_6()
2721 rdev->pm.power_state[state_index].clock_info[0].sclk = radeon_atombios_parse_power_table_6()
2722 rdev->clock.default_sclk; radeon_atombios_parse_power_table_6()
2725 rdev->pm.power_state[state_index].num_clock_modes = mode_index; radeon_atombios_parse_power_table_6()
2727 radeon_atombios_parse_pplib_non_clock_info(rdev, state_index, mode_index, radeon_atombios_parse_power_table_6()
2735 if (rdev->pm.power_state[i].num_clock_modes > 1) radeon_atombios_parse_power_table_6()
2736 rdev->pm.power_state[i].clock_info[0].flags |= radeon_atombios_parse_power_table_6()
2740 if (rdev->pm.default_power_state_index == -1) { radeon_atombios_parse_power_table_6()
2741 rdev->pm.power_state[0].type = radeon_atombios_parse_power_table_6()
2743 rdev->pm.default_power_state_index = 0; radeon_atombios_parse_power_table_6()
2744 rdev->pm.power_state[0].default_clock_mode = radeon_atombios_parse_power_table_6()
2745 &rdev->pm.power_state[0].clock_info[0]; radeon_atombios_parse_power_table_6()
2750 void radeon_atombios_get_power_modes(struct radeon_device *rdev) radeon_atombios_get_power_modes() argument
2752 struct radeon_mode_info *mode_info = &rdev->mode_info; radeon_atombios_get_power_modes()
2758 rdev->pm.default_power_state_index = -1; radeon_atombios_get_power_modes()
2766 state_index = radeon_atombios_parse_power_table_1_3(rdev); radeon_atombios_get_power_modes()
2770 state_index = radeon_atombios_parse_power_table_4_5(rdev); radeon_atombios_get_power_modes()
2773 state_index = radeon_atombios_parse_power_table_6(rdev); radeon_atombios_get_power_modes()
2781 rdev->pm.power_state = kzalloc(sizeof(struct radeon_power_state), GFP_KERNEL); radeon_atombios_get_power_modes()
2782 if (rdev->pm.power_state) { radeon_atombios_get_power_modes()
2783 rdev->pm.power_state[0].clock_info = radeon_atombios_get_power_modes()
2785 if (rdev->pm.power_state[0].clock_info) { radeon_atombios_get_power_modes()
2787 rdev->pm.power_state[state_index].type = radeon_atombios_get_power_modes()
2789 rdev->pm.power_state[state_index].num_clock_modes = 1; radeon_atombios_get_power_modes()
2790 rdev->pm.power_state[state_index].clock_info[0].mclk = rdev->clock.default_mclk; radeon_atombios_get_power_modes()
2791 rdev->pm.power_state[state_index].clock_info[0].sclk = rdev->clock.default_sclk; radeon_atombios_get_power_modes()
2792 rdev->pm.power_state[state_index].default_clock_mode = radeon_atombios_get_power_modes()
2793 &rdev->pm.power_state[state_index].clock_info[0]; radeon_atombios_get_power_modes()
2794 rdev->pm.power_state[state_index].clock_info[0].voltage.type = VOLTAGE_NONE; radeon_atombios_get_power_modes()
2795 rdev->pm.power_state[state_index].pcie_lanes = 16; radeon_atombios_get_power_modes()
2796 rdev->pm.default_power_state_index = state_index; radeon_atombios_get_power_modes()
2797 rdev->pm.power_state[state_index].flags = 0; radeon_atombios_get_power_modes()
2803 rdev->pm.num_power_states = state_index; radeon_atombios_get_power_modes()
2805 rdev->pm.current_power_state_index = rdev->pm.default_power_state_index; radeon_atombios_get_power_modes()
2806 rdev->pm.current_clock_mode_index = 0; radeon_atombios_get_power_modes()
2807 if (rdev->pm.default_power_state_index >= 0) radeon_atombios_get_power_modes()
2808 rdev->pm.current_vddc = radeon_atombios_get_power_modes()
2809 rdev->pm.power_state[rdev->pm.default_power_state_index].clock_info[0].voltage.voltage; radeon_atombios_get_power_modes()
2811 rdev->pm.current_vddc = 0; radeon_atombios_get_power_modes()
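When none of the table parsers yields a usable state, radeon_atombios_get_power_modes() synthesizes a single default entry from the BIOS boot clocks, which is what the kzalloc() path above does. The essential shape, with the enum value POWER_STATE_TYPE_DEFAULT assumed from the rest of the driver:

	/* Fallback: one synthetic state built from the default clocks. */
	rdev->pm.power_state = kzalloc(sizeof(struct radeon_power_state),
				       GFP_KERNEL);
	if (rdev->pm.power_state) {
		rdev->pm.power_state[0].clock_info =
			kzalloc(sizeof(struct radeon_pm_clock_info), GFP_KERNEL);
		if (rdev->pm.power_state[0].clock_info) {
			rdev->pm.power_state[0].type = POWER_STATE_TYPE_DEFAULT; /* assumed */
			rdev->pm.power_state[0].num_clock_modes = 1;
			rdev->pm.power_state[0].clock_info[0].mclk = rdev->clock.default_mclk;
			rdev->pm.power_state[0].clock_info[0].sclk = rdev->clock.default_sclk;
			rdev->pm.power_state[0].default_clock_mode =
				&rdev->pm.power_state[0].clock_info[0];
			rdev->pm.power_state[0].clock_info[0].voltage.type = VOLTAGE_NONE;
			rdev->pm.power_state[0].pcie_lanes = 16;
			rdev->pm.default_power_state_index = 0;
		}
	}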
2824 int radeon_atom_get_clock_dividers(struct radeon_device *rdev, radeon_atom_get_clock_dividers() argument
2837 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) radeon_atom_get_clock_dividers()
2846 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_get_clock_dividers()
2856 if (rdev->family <= CHIP_RV770) { radeon_atom_get_clock_dividers()
2860 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_get_clock_dividers()
2865 if (rdev->family == CHIP_RV770) { radeon_atom_get_clock_dividers()
2875 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_get_clock_dividers()
2889 if (rdev->family >= CHIP_TAHITI) radeon_atom_get_clock_dividers()
2895 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_get_clock_dividers()
2914 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_get_clock_dividers()
2925 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_get_clock_dividers()
2941 int radeon_atom_get_memory_pll_dividers(struct radeon_device *rdev, radeon_atom_get_memory_pll_dividers() argument
2953 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) radeon_atom_get_memory_pll_dividers()
2966 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_get_memory_pll_dividers()
2992 void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable) radeon_atom_set_clock_gating() argument
2999 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_set_clock_gating()
3002 uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev) radeon_atom_get_engine_clock() argument
3007 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_get_engine_clock()
3011 uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev) radeon_atom_get_memory_clock() argument
3016 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_get_memory_clock()
3020 void radeon_atom_set_engine_clock(struct radeon_device *rdev, radeon_atom_set_engine_clock() argument
3028 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_set_engine_clock()
3031 void radeon_atom_set_memory_clock(struct radeon_device *rdev, radeon_atom_set_memory_clock() argument
3037 if (rdev->flags & RADEON_IS_IGP) radeon_atom_set_memory_clock()
3042 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_set_memory_clock()
3045 void radeon_atom_set_engine_dram_timings(struct radeon_device *rdev, radeon_atom_set_engine_dram_timings() argument
3061 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_set_engine_dram_timings()
3064 void radeon_atom_update_memory_dll(struct radeon_device *rdev, radeon_atom_update_memory_dll() argument
3072 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_update_memory_dll()
3075 void radeon_atom_set_ac_timing(struct radeon_device *rdev, radeon_atom_set_ac_timing() argument
3084 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_set_ac_timing()
3094 void radeon_atom_set_voltage(struct radeon_device *rdev, u16 voltage_level, u8 voltage_type) radeon_atom_set_voltage() argument
3100 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) radeon_atom_set_voltage()
3128 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_set_voltage()
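radeon_atom_set_voltage() follows the command-table convention used by nearly every helper in this file: look up the table index, parse its header to learn the format/content revision, fill a per-revision argument union, then execute. A sketch of the pattern, with the v1 field names assumed from the SetVoltage parameter structs:

	/* The recurring ATOM command-table pattern in this file. */
	union set_voltage args;	/* union of per-revision arg structs */
	int index = GetIndexIntoMasterTable(COMMAND, SetVoltage);
	u8 frev, crev;

	if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index,
				   &frev, &crev))
		return;		/* table absent in this VBIOS */

	switch (crev) {
	case 1:	/* field names assumed per the v1 struct */
		args.v1.ucVoltageType = voltage_type;
		args.v1.ucVoltageMode = SET_VOLTAGE_MODE_SET_VOLTAGE;
		args.v1.usVoltageLevel = cpu_to_le16(voltage_level);
		break;
	default:
		DRM_ERROR("unknown SetVoltage crev %d\n", crev);
		return;
	}
	atom_execute_table(rdev->mode_info.atom_context, index,
			   (uint32_t *)&args);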
3131 int radeon_atom_get_max_vddc(struct radeon_device *rdev, u8 voltage_type, radeon_atom_get_max_vddc() argument
3138 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) radeon_atom_get_max_vddc()
3149 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_get_max_vddc()
3158 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_get_max_vddc()
3170 int radeon_atom_get_leakage_vddc_based_on_leakage_idx(struct radeon_device *rdev, radeon_atom_get_leakage_vddc_based_on_leakage_idx() argument
3174 return radeon_atom_get_max_vddc(rdev, VOLTAGE_TYPE_VDDC, leakage_idx, voltage); radeon_atom_get_leakage_vddc_based_on_leakage_idx()
3177 int radeon_atom_get_leakage_id_from_vbios(struct radeon_device *rdev, radeon_atom_get_leakage_id_from_vbios() argument
3184 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) radeon_atom_get_leakage_id_from_vbios()
3194 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_get_leakage_id_from_vbios()
3206 int radeon_atom_get_leakage_vddc_based_on_leakage_params(struct radeon_device *rdev, radeon_atom_get_leakage_vddc_based_on_leakage_params() argument
3221 if (!atom_parse_data_header(rdev->mode_info.atom_context, index, &size, radeon_atom_get_leakage_vddc_based_on_leakage_params()
3226 (rdev->mode_info.atom_context->bios + data_offset); radeon_atom_get_leakage_vddc_based_on_leakage_params()
3237 (rdev->mode_info.atom_context->bios + data_offset + radeon_atom_get_leakage_vddc_based_on_leakage_params()
3240 (rdev->mode_info.atom_context->bios + data_offset + radeon_atom_get_leakage_vddc_based_on_leakage_params()
3243 (rdev->mode_info.atom_context->bios + data_offset + radeon_atom_get_leakage_vddc_based_on_leakage_params()
3246 (rdev->mode_info.atom_context->bios + data_offset + radeon_atom_get_leakage_vddc_based_on_leakage_params()
3249 (rdev->mode_info.atom_context->bios + data_offset + radeon_atom_get_leakage_vddc_based_on_leakage_params()
3297 int radeon_atom_get_voltage_evv(struct radeon_device *rdev, radeon_atom_get_voltage_evv() argument
3303 u32 count = rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.count; radeon_atom_get_voltage_evv()
3307 if (rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[entry_id].v == radeon_atom_get_voltage_evv()
3319 cpu_to_le32(rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[entry_id].clk); radeon_atom_get_voltage_evv()
3321 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_get_voltage_evv()
3328 int radeon_atom_get_voltage_gpio_settings(struct radeon_device *rdev, radeon_atom_get_voltage_gpio_settings() argument
3336 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) radeon_atom_get_voltage_gpio_settings()
3347 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_get_voltage_gpio_settings()
3355 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_get_voltage_gpio_settings()
3431 radeon_atom_is_voltage_gpio(struct radeon_device *rdev, radeon_atom_is_voltage_gpio() argument
3440 if (atom_parse_data_header(rdev->mode_info.atom_context, index, &size, radeon_atom_is_voltage_gpio()
3443 (rdev->mode_info.atom_context->bios + data_offset); radeon_atom_is_voltage_gpio()
3489 int radeon_atom_get_svi2_info(struct radeon_device *rdev, radeon_atom_get_svi2_info() argument
3499 if (atom_parse_data_header(rdev->mode_info.atom_context, index, &size, radeon_atom_get_svi2_info()
3502 (rdev->mode_info.atom_context->bios + data_offset); radeon_atom_get_svi2_info()
3533 int radeon_atom_get_max_voltage(struct radeon_device *rdev, radeon_atom_get_max_voltage() argument
3542 if (atom_parse_data_header(rdev->mode_info.atom_context, index, &size, radeon_atom_get_max_voltage()
3545 (rdev->mode_info.atom_context->bios + data_offset); radeon_atom_get_max_voltage()
3592 int radeon_atom_get_min_voltage(struct radeon_device *rdev, radeon_atom_get_min_voltage() argument
3601 if (atom_parse_data_header(rdev->mode_info.atom_context, index, &size, radeon_atom_get_min_voltage()
3604 (rdev->mode_info.atom_context->bios + data_offset); radeon_atom_get_min_voltage()
3642 int radeon_atom_get_voltage_step(struct radeon_device *rdev, radeon_atom_get_voltage_step() argument
3651 if (atom_parse_data_header(rdev->mode_info.atom_context, index, &size, radeon_atom_get_voltage_step()
3654 (rdev->mode_info.atom_context->bios + data_offset); radeon_atom_get_voltage_step()
3683 int radeon_atom_round_to_true_voltage(struct radeon_device *rdev, radeon_atom_round_to_true_voltage() argument
3690 if (radeon_atom_get_max_voltage(rdev, voltage_type, &max_voltage)) radeon_atom_round_to_true_voltage()
3692 if (radeon_atom_get_min_voltage(rdev, voltage_type, &min_voltage)) radeon_atom_round_to_true_voltage()
3694 if (radeon_atom_get_voltage_step(rdev, voltage_type, &voltage_step)) radeon_atom_round_to_true_voltage()
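radeon_atom_round_to_true_voltage() combines the three queries above: clamp the request to the minimum, then snap it down onto the regulator's step grid (with an 800 mV minimum and a 25 mV step, for example, a request of 837 mV lands on 825 mV). The rounding step, sketched:

	/* Sketch of the rounding once min/max/step are known; integer
	 * division snaps the request down onto the step grid. */
	if (voltage <= min_voltage)
		voltage = min_voltage;
	else
		voltage = (voltage - min_voltage) / voltage_step
			* voltage_step + min_voltage;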
3709 int radeon_atom_get_voltage_table(struct radeon_device *rdev, radeon_atom_get_voltage_table() argument
3720 if (atom_parse_data_header(rdev->mode_info.atom_context, index, &size, radeon_atom_get_voltage_table()
3723 (rdev->mode_info.atom_context->bios + data_offset); radeon_atom_get_voltage_table()
3745 ret = radeon_atom_get_voltage_gpio_settings(rdev, radeon_atom_get_voltage_table()
3810 int radeon_atom_get_memory_info(struct radeon_device *rdev, radeon_atom_get_memory_info() argument
3820 if (atom_parse_data_header(rdev->mode_info.atom_context, index, &size, radeon_atom_get_memory_info()
3823 (rdev->mode_info.atom_context->bios + data_offset); radeon_atom_get_memory_info()
3899 int radeon_atom_get_mclk_range_table(struct radeon_device *rdev, radeon_atom_get_mclk_range_table() argument
3912 if (atom_parse_data_header(rdev->mode_info.atom_context, index, &size, radeon_atom_get_mclk_range_table()
3915 (rdev->mode_info.atom_context->bios + data_offset); radeon_atom_get_mclk_range_table()
3972 int radeon_atom_init_mc_reg_table(struct radeon_device *rdev, radeon_atom_init_mc_reg_table() argument
3984 if (atom_parse_data_header(rdev->mode_info.atom_context, index, &size, radeon_atom_init_mc_reg_table()
3987 (rdev->mode_info.atom_context->bios + data_offset); radeon_atom_init_mc_reg_table()
4065 struct radeon_device *rdev = dev->dev_private; radeon_atom_initialize_bios_scratch_regs() local
4068 if (rdev->family >= CHIP_R600) { radeon_atom_initialize_bios_scratch_regs()
4083 if (ASIC_IS_DCE4(rdev)) radeon_atom_initialize_bios_scratch_regs()
4086 if (rdev->family >= CHIP_R600) { radeon_atom_initialize_bios_scratch_regs()
4096 void radeon_save_bios_scratch_regs(struct radeon_device *rdev) radeon_save_bios_scratch_regs() argument
4101 if (rdev->family >= CHIP_R600) radeon_save_bios_scratch_regs()
4107 rdev->bios_scratch[i] = RREG32(scratch_reg + (i * 4)); radeon_save_bios_scratch_regs()
4110 void radeon_restore_bios_scratch_regs(struct radeon_device *rdev) radeon_restore_bios_scratch_regs() argument
4115 if (rdev->family >= CHIP_R600) radeon_restore_bios_scratch_regs()
4121 WREG32(scratch_reg + (i * 4), rdev->bios_scratch[i]); radeon_restore_bios_scratch_regs()
4127 struct radeon_device *rdev = dev->dev_private; radeon_atom_output_lock() local
4130 if (rdev->family >= CHIP_R600) radeon_atom_output_lock()
4143 if (rdev->family >= CHIP_R600) radeon_atom_output_lock()
4156 struct radeon_device *rdev = dev->dev_private; radeon_atombios_connected_scratch_regs() local
4162 if (rdev->family >= CHIP_R600) { radeon_atombios_connected_scratch_regs()
4325 if (rdev->family >= CHIP_R600) { radeon_atombios_connected_scratch_regs()
4340 struct radeon_device *rdev = dev->dev_private; radeon_atombios_encoder_crtc_scratch_regs() local
4344 if (ASIC_IS_DCE4(rdev)) radeon_atombios_encoder_crtc_scratch_regs()
4347 if (rdev->family >= CHIP_R600) radeon_atombios_encoder_crtc_scratch_regs()
4385 if (rdev->family >= CHIP_R600) radeon_atombios_encoder_crtc_scratch_regs()
4395 struct radeon_device *rdev = dev->dev_private; radeon_atombios_encoder_dpms_scratch_regs() local
4399 if (ASIC_IS_DCE4(rdev)) radeon_atombios_encoder_dpms_scratch_regs()
4402 if (rdev->family >= CHIP_R600) radeon_atombios_encoder_dpms_scratch_regs()
4468 if (rdev->family >= CHIP_R600) radeon_atombios_encoder_dpms_scratch_regs()
H A Drv770_dma.c32 * @rdev: radeon_device pointer
42 struct radeon_fence *rv770_copy_dma(struct radeon_device *rdev, rv770_copy_dma() argument
49 int ring_index = rdev->asic->copy.dma_ring_index; rv770_copy_dma()
50 struct radeon_ring *ring = &rdev->ring[ring_index]; rv770_copy_dma()
59 r = radeon_ring_lock(rdev, ring, num_loops * 5 + 8); rv770_copy_dma()
62 radeon_sync_free(rdev, &sync, NULL); rv770_copy_dma()
66 radeon_sync_resv(rdev, &sync, resv, false); rv770_copy_dma()
67 radeon_sync_rings(rdev, &sync, ring->idx); rv770_copy_dma()
83 r = radeon_fence_emit(rdev, &fence, ring->idx); rv770_copy_dma()
85 radeon_ring_unlock_undo(rdev, ring); rv770_copy_dma()
86 radeon_sync_free(rdev, &sync, NULL); rv770_copy_dma()
90 radeon_ring_unlock_commit(rdev, ring, false); rv770_copy_dma()
91 radeon_sync_free(rdev, &sync, fence); rv770_copy_dma()
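rv770_copy_dma() shows the standard radeon copy-ring discipline: lock the ring with enough space for every packet, synchronize against the buffer's reservation object and the other rings, emit the copy packets, then fence and commit. The skeleton, with the packet emission elided:

	/* Skeleton of the copy path above; DMA_PACKET_COPY writes elided. */
	r = radeon_ring_lock(rdev, ring, num_loops * 5 + 8);
	if (r) {
		radeon_sync_free(rdev, &sync, NULL);
		return ERR_PTR(r);
	}

	radeon_sync_resv(rdev, &sync, resv, false);	/* wait on prior users */
	radeon_sync_rings(rdev, &sync, ring->idx);

	/* ... per-loop copy packets go here ... */

	r = radeon_fence_emit(rdev, &fence, ring->idx);
	if (r) {
		radeon_ring_unlock_undo(rdev, ring);	/* roll the ring back */
		radeon_sync_free(rdev, &sync, NULL);
		return ERR_PTR(r);
	}
	radeon_ring_unlock_commit(rdev, ring, false);
	radeon_sync_free(rdev, &sync, fence);		/* hand sync off to the fence */
	return fence;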
H A Dradeon_gem.c44 int radeon_gem_object_create(struct radeon_device *rdev, unsigned long size, radeon_gem_object_create() argument
62 max_size = rdev->mc.gtt_size - rdev->gart_pin_size; radeon_gem_object_create()
70 r = radeon_bo_create(rdev, size, alignment, kernel, initial_domain, radeon_gem_object_create()
86 mutex_lock(&rdev->gem.mutex); radeon_gem_object_create()
87 list_add_tail(&robj->list, &rdev->gem.objects); radeon_gem_object_create()
88 mutex_unlock(&rdev->gem.mutex); radeon_gem_object_create()
126 int radeon_gem_init(struct radeon_device *rdev) radeon_gem_init() argument
128 INIT_LIST_HEAD(&rdev->gem.objects); radeon_gem_init()
132 void radeon_gem_fini(struct radeon_device *rdev) radeon_gem_fini() argument
134 radeon_bo_force_delete(rdev); radeon_gem_fini()
144 struct radeon_device *rdev = rbo->rdev; radeon_gem_object_open() local
150 if ((rdev->family < CHIP_CAYMAN) || radeon_gem_object_open()
151 (!rdev->accel_working)) { radeon_gem_object_open()
162 bo_va = radeon_vm_bo_add(rdev, vm, rbo); radeon_gem_object_open()
175 struct radeon_device *rdev = rbo->rdev; radeon_gem_object_close() local
181 if ((rdev->family < CHIP_CAYMAN) || radeon_gem_object_close()
182 (!rdev->accel_working)) { radeon_gem_object_close()
188 dev_err(rdev->dev, "leaking bo va because " radeon_gem_object_close()
195 radeon_vm_bo_rmv(rdev, bo_va); radeon_gem_object_close()
201 static int radeon_gem_handle_lockup(struct radeon_device *rdev, int r) radeon_gem_handle_lockup() argument
204 r = radeon_gpu_reset(rdev); radeon_gem_handle_lockup()
217 struct radeon_device *rdev = dev->dev_private; radeon_gem_info_ioctl() local
221 man = &rdev->mman.bdev.man[TTM_PL_VRAM]; radeon_gem_info_ioctl()
223 args->vram_size = rdev->mc.real_vram_size; radeon_gem_info_ioctl()
225 args->vram_visible -= rdev->vram_pin_size; radeon_gem_info_ioctl()
226 args->gart_size = rdev->mc.gtt_size; radeon_gem_info_ioctl()
227 args->gart_size -= rdev->gart_pin_size; radeon_gem_info_ioctl()
251 struct radeon_device *rdev = dev->dev_private; radeon_gem_create_ioctl() local
257 down_read(&rdev->exclusive_lock); radeon_gem_create_ioctl()
260 r = radeon_gem_object_create(rdev, args->size, args->alignment, radeon_gem_create_ioctl()
264 up_read(&rdev->exclusive_lock); radeon_gem_create_ioctl()
265 r = radeon_gem_handle_lockup(rdev, r); radeon_gem_create_ioctl()
272 up_read(&rdev->exclusive_lock); radeon_gem_create_ioctl()
273 r = radeon_gem_handle_lockup(rdev, r); radeon_gem_create_ioctl()
277 up_read(&rdev->exclusive_lock); radeon_gem_create_ioctl()
284 struct radeon_device *rdev = dev->dev_private; radeon_gem_userptr_ioctl() local
302 if (rdev->family < CHIP_R600) radeon_gem_userptr_ioctl()
313 down_read(&rdev->exclusive_lock); radeon_gem_userptr_ioctl()
316 r = radeon_gem_object_create(rdev, args->size, 0, radeon_gem_userptr_ioctl()
356 up_read(&rdev->exclusive_lock); radeon_gem_userptr_ioctl()
363 up_read(&rdev->exclusive_lock); radeon_gem_userptr_ioctl()
364 r = radeon_gem_handle_lockup(rdev, r); radeon_gem_userptr_ioctl()
374 struct radeon_device *rdev = dev->dev_private; radeon_gem_set_domain_ioctl() local
382 down_read(&rdev->exclusive_lock); radeon_gem_set_domain_ioctl()
387 up_read(&rdev->exclusive_lock); radeon_gem_set_domain_ioctl()
395 up_read(&rdev->exclusive_lock); radeon_gem_set_domain_ioctl()
396 r = radeon_gem_handle_lockup(robj->rdev, r); radeon_gem_set_domain_ioctl()
459 struct radeon_device *rdev = dev->dev_private; radeon_gem_wait_idle_ioctl() local
481 if (rdev->asic->mmio_hdp_flush && radeon_gem_wait_idle_ioctl()
483 robj->rdev->asic->mmio_hdp_flush(rdev); radeon_gem_wait_idle_ioctl()
485 r = radeon_gem_handle_lockup(rdev, r); radeon_gem_wait_idle_ioctl()
533 * @rdev: radeon_device pointer
539 static void radeon_gem_va_update_vm(struct radeon_device *rdev, radeon_gem_va_update_vm() argument
555 vm_bos = radeon_vm_get_bos(rdev, bo_va->vm, &list); radeon_gem_va_update_vm()
572 r = radeon_vm_clear_freed(rdev, bo_va->vm); radeon_gem_va_update_vm()
577 r = radeon_vm_bo_update(rdev, bo_va, &bo_va->bo->tbo.mem); radeon_gem_va_update_vm()
597 struct radeon_device *rdev = dev->dev_private; radeon_gem_va_ioctl() local
604 if (!rdev->vm_manager.enabled) { radeon_gem_va_ioctl()
678 r = radeon_vm_bo_set_addr(rdev, bo_va, args->offset, args->flags); radeon_gem_va_ioctl()
681 r = radeon_vm_bo_set_addr(rdev, bo_va, 0, 0); radeon_gem_va_ioctl()
687 radeon_gem_va_update_vm(rdev, bo_va); radeon_gem_va_ioctl()
742 struct radeon_device *rdev = dev->dev_private; radeon_mode_dumb_create() local
747 args->pitch = radeon_align_pitch(rdev, args->width, args->bpp, 0) * ((args->bpp + 1) / 8); radeon_mode_dumb_create()
751 r = radeon_gem_object_create(rdev, args->size, 0, radeon_mode_dumb_create()
772 struct radeon_device *rdev = dev->dev_private; radeon_debugfs_gem_info() local
776 mutex_lock(&rdev->gem.mutex); radeon_debugfs_gem_info()
777 list_for_each_entry(rbo, &rdev->gem.objects, list) { radeon_debugfs_gem_info()
799 mutex_unlock(&rdev->gem.mutex); radeon_debugfs_gem_info()
808 int radeon_gem_debugfs_init(struct radeon_device *rdev) radeon_gem_debugfs_init() argument
811 return radeon_debugfs_add_files(rdev, radeon_debugfs_gem_list, 1); radeon_gem_debugfs_init()
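radeon_gem_object_create() at the top of this file pairs BO creation with per-device bookkeeping: every GEM object lands on rdev->gem.objects under gem.mutex, which is the list the debugfs dump above walks. Condensed, under the assumption that struct radeon_bo embeds its drm_gem_object as gem_base:

	/* Condensed shape of radeon_gem_object_create(); the trailing
	 * radeon_bo_create() args are assumed from the 4.4 signature. */
	r = radeon_bo_create(rdev, size, alignment, kernel, initial_domain,
			     flags, NULL, NULL, &robj);
	if (r)
		return r;			/* error handling elided */
	*obj = &robj->gem_base;			/* assumed embedding */

	mutex_lock(&rdev->gem.mutex);
	list_add_tail(&robj->list, &rdev->gem.objects);
	mutex_unlock(&rdev->gem.mutex);
	return 0;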
H A Dradeon_combios.c133 struct radeon_device *rdev = dev->dev_private; combios_get_table_offset() local
137 if (!rdev->bios) combios_get_table_offset()
362 size = RBIOS8(rdev->bios_header_start + 0x6); combios_get_table_offset()
365 offset = RBIOS16(rdev->bios_header_start + check_offset); combios_get_table_offset()
370 bool radeon_combios_check_hardcoded_edid(struct radeon_device *rdev) radeon_combios_check_hardcoded_edid() argument
375 edid_info = combios_get_table_offset(rdev->ddev, COMBIOS_HARDCODED_EDID_TABLE); radeon_combios_check_hardcoded_edid()
379 raw = rdev->bios + edid_info; radeon_combios_check_hardcoded_edid()
392 rdev->mode_info.bios_hardcoded_edid = edid; radeon_combios_check_hardcoded_edid()
393 rdev->mode_info.bios_hardcoded_edid_size = size; radeon_combios_check_hardcoded_edid()
399 radeon_bios_get_hardcoded_edid(struct radeon_device *rdev) radeon_bios_get_hardcoded_edid() argument
403 if (rdev->mode_info.bios_hardcoded_edid) { radeon_bios_get_hardcoded_edid()
404 edid = kmalloc(rdev->mode_info.bios_hardcoded_edid_size, GFP_KERNEL); radeon_bios_get_hardcoded_edid()
407 (unsigned char *)rdev->mode_info.bios_hardcoded_edid, radeon_bios_get_hardcoded_edid()
408 rdev->mode_info.bios_hardcoded_edid_size); radeon_bios_get_hardcoded_edid()
415 static struct radeon_i2c_bus_rec combios_setup_i2c_bus(struct radeon_device *rdev, combios_setup_i2c_bus() argument
463 if (rdev->family == CHIP_RS300 || combios_setup_i2c_bus()
464 rdev->family == CHIP_RS400 || combios_setup_i2c_bus()
465 rdev->family == CHIP_RS480) combios_setup_i2c_bus()
467 else if (rdev->family == CHIP_R300 || combios_setup_i2c_bus()
468 rdev->family == CHIP_R350) { combios_setup_i2c_bus()
475 if (rdev->family == CHIP_R200 || combios_setup_i2c_bus()
476 rdev->family == CHIP_R300 || combios_setup_i2c_bus()
477 rdev->family == CHIP_R350) { combios_setup_i2c_bus()
480 } else if (rdev->family == CHIP_RS300 || combios_setup_i2c_bus()
481 rdev->family == CHIP_RS400 || combios_setup_i2c_bus()
482 rdev->family == CHIP_RS480) combios_setup_i2c_bus()
484 else if (rdev->family >= CHIP_RV350) { combios_setup_i2c_bus()
554 switch (rdev->family) { combios_setup_i2c_bus()
643 static struct radeon_i2c_bus_rec radeon_combios_get_i2c_info_from_table(struct radeon_device *rdev) radeon_combios_get_i2c_info_from_table() argument
645 struct drm_device *dev = rdev->ddev; radeon_combios_get_i2c_info_from_table()
662 i2c = combios_setup_i2c_bus(rdev, DDC_MONID, radeon_combios_get_i2c_info_from_table()
671 void radeon_combios_i2c_init(struct radeon_device *rdev) radeon_combios_i2c_init() argument
673 struct drm_device *dev = rdev->ddev; radeon_combios_i2c_init()
688 i2c = combios_setup_i2c_bus(rdev, DDC_DVI, 0, 0); radeon_combios_i2c_init()
689 rdev->i2c_bus[0] = radeon_i2c_create(dev, &i2c, "DVI_DDC"); radeon_combios_i2c_init()
691 i2c = combios_setup_i2c_bus(rdev, DDC_VGA, 0, 0); radeon_combios_i2c_init()
692 rdev->i2c_bus[1] = radeon_i2c_create(dev, &i2c, "VGA_DDC"); radeon_combios_i2c_init()
699 rdev->i2c_bus[2] = radeon_i2c_create(dev, &i2c, "MM_I2C"); radeon_combios_i2c_init()
701 if (rdev->family == CHIP_R300 || radeon_combios_i2c_init()
702 rdev->family == CHIP_R350) { radeon_combios_i2c_init()
704 } else if (rdev->family == CHIP_RS300 || radeon_combios_i2c_init()
705 rdev->family == CHIP_RS400 || radeon_combios_i2c_init()
706 rdev->family == CHIP_RS480) { radeon_combios_i2c_init()
708 i2c = combios_setup_i2c_bus(rdev, DDC_CRT2, 0, 0); radeon_combios_i2c_init()
709 rdev->i2c_bus[3] = radeon_i2c_create(dev, &i2c, "MONID"); radeon_combios_i2c_init()
712 i2c = radeon_combios_get_i2c_info_from_table(rdev); radeon_combios_i2c_init()
714 rdev->i2c_bus[4] = radeon_i2c_create(dev, &i2c, "GPIOPAD_MASK"); radeon_combios_i2c_init()
715 } else if ((rdev->family == CHIP_R200) || radeon_combios_i2c_init()
716 (rdev->family >= CHIP_R300)) { radeon_combios_i2c_init()
718 i2c = combios_setup_i2c_bus(rdev, DDC_MONID, 0, 0); radeon_combios_i2c_init()
719 rdev->i2c_bus[3] = radeon_i2c_create(dev, &i2c, "MONID"); radeon_combios_i2c_init()
722 i2c = combios_setup_i2c_bus(rdev, DDC_MONID, 0, 0); radeon_combios_i2c_init()
723 rdev->i2c_bus[3] = radeon_i2c_create(dev, &i2c, "MONID"); radeon_combios_i2c_init()
725 i2c = combios_setup_i2c_bus(rdev, DDC_CRT2, 0, 0); radeon_combios_i2c_init()
726 rdev->i2c_bus[4] = radeon_i2c_create(dev, &i2c, "CRT2_DDC"); radeon_combios_i2c_init()
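radeon_combios_i2c_init() hard-wires the first buses (DVI_DDC, VGA_DDC, MM_I2C) and then branches by ASIC family for buses 3 and 4, as the hits above show. Condensed, with the branch order simplified and an assumed i2c.valid check:

	/* Condensed view of the family-dependent tail of the function. */
	if (rdev->family == CHIP_RS300 || rdev->family == CHIP_RS400 ||
	    rdev->family == CHIP_RS480) {
		/* IGPs: MONID rides the CRT2 line, plus a GPIOPAD mask bus */
		i2c = combios_setup_i2c_bus(rdev, DDC_CRT2, 0, 0);
		rdev->i2c_bus[3] = radeon_i2c_create(dev, &i2c, "MONID");
		i2c = radeon_combios_get_i2c_info_from_table(rdev);
		if (i2c.valid)	/* assumed check */
			rdev->i2c_bus[4] = radeon_i2c_create(dev, &i2c,
							     "GPIOPAD_MASK");
	} else if (rdev->family == CHIP_R200 || rdev->family >= CHIP_R300) {
		/* single MONID bus only on these parts */
		i2c = combios_setup_i2c_bus(rdev, DDC_MONID, 0, 0);
		rdev->i2c_bus[3] = radeon_i2c_create(dev, &i2c, "MONID");
	} else {
		i2c = combios_setup_i2c_bus(rdev, DDC_MONID, 0, 0);
		rdev->i2c_bus[3] = radeon_i2c_create(dev, &i2c, "MONID");
		i2c = combios_setup_i2c_bus(rdev, DDC_CRT2, 0, 0);
		rdev->i2c_bus[4] = radeon_i2c_create(dev, &i2c, "CRT2_DDC");
	}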
732 struct radeon_device *rdev = dev->dev_private; radeon_combios_get_clock_info() local
734 struct radeon_pll *p1pll = &rdev->clock.p1pll; radeon_combios_get_clock_info()
735 struct radeon_pll *p2pll = &rdev->clock.p2pll; radeon_combios_get_clock_info()
736 struct radeon_pll *spll = &rdev->clock.spll; radeon_combios_get_clock_info()
737 struct radeon_pll *mpll = &rdev->clock.mpll; radeon_combios_get_clock_info()
800 rdev->clock.default_sclk = sclk; radeon_combios_get_clock_info()
801 rdev->clock.default_mclk = mclk; radeon_combios_get_clock_info()
804 rdev->clock.max_pixel_clock = RBIOS32(pll_info + 0x16); radeon_combios_get_clock_info()
806 rdev->clock.max_pixel_clock = 35000; /* might need something asic specific */ radeon_combios_get_clock_info()
813 bool radeon_combios_sideport_present(struct radeon_device *rdev) radeon_combios_sideport_present() argument
815 struct drm_device *dev = rdev->ddev; radeon_combios_sideport_present()
819 if (rdev->family == CHIP_RS400) radeon_combios_sideport_present()
852 static void radeon_legacy_get_primary_dac_info_from_table(struct radeon_device *rdev, radeon_legacy_get_primary_dac_info_from_table() argument
855 p_dac->ps2_pdac_adj = default_primarydac_adj[rdev->family]; radeon_legacy_get_primary_dac_info_from_table()
864 struct radeon_device *rdev = dev->dev_private; radeon_combios_get_primary_dac_info() local
910 radeon_legacy_get_primary_dac_info_from_table(rdev, p_dac); radeon_combios_get_primary_dac_info()
916 radeon_combios_get_tv_info(struct radeon_device *rdev) radeon_combios_get_tv_info() argument
918 struct drm_device *dev = rdev->ddev; radeon_combios_get_tv_info()
999 static void radeon_legacy_get_tv_dac_info_from_table(struct radeon_device *rdev, radeon_legacy_get_tv_dac_info_from_table() argument
1002 tv_dac->ps2_tvdac_adj = default_tvdac_adj[rdev->family]; radeon_legacy_get_tv_dac_info_from_table()
1003 if ((rdev->flags & RADEON_IS_MOBILITY) && (rdev->family == CHIP_RV250)) radeon_legacy_get_tv_dac_info_from_table()
1015 struct radeon_device *rdev = dev->dev_private; radeon_combios_get_tv_dac_info() local
1060 tv_dac->tv_std = radeon_combios_get_tv_info(rdev); radeon_combios_get_tv_dac_info()
1095 radeon_legacy_get_tv_dac_info_from_table(rdev, tv_dac); radeon_combios_get_tv_dac_info()
1102 *rdev) radeon_legacy_get_lvds_info_from_regs()
1174 struct radeon_device *rdev = dev->dev_private; radeon_combios_get_lvds_info() local
1286 lvds = radeon_legacy_get_lvds_info_from_regs(rdev); radeon_combios_get_lvds_info()
1319 struct radeon_device *rdev = dev->dev_private; radeon_legacy_get_tmds_info_from_table() local
1324 default_tmds_pll[rdev->family][i].value; radeon_legacy_get_tmds_info_from_table()
1325 tmds->tmds_pll[i].freq = default_tmds_pll[rdev->family][i].freq; radeon_legacy_get_tmds_info_from_table()
1335 struct radeon_device *rdev = dev->dev_private; radeon_legacy_get_tmds_info_from_combios() local
1388 struct radeon_device *rdev = dev->dev_private; radeon_legacy_get_ext_tmds_info_from_table() local
1392 i2c_bus = combios_setup_i2c_bus(rdev, DDC_MONID, 0, 0); radeon_legacy_get_ext_tmds_info_from_table()
1393 tmds->i2c_bus = radeon_i2c_lookup(rdev, &i2c_bus); radeon_legacy_get_ext_tmds_info_from_table()
1396 switch (rdev->mode_info.connector_table) { radeon_legacy_get_ext_tmds_info_from_table()
1412 struct radeon_device *rdev = dev->dev_private; radeon_legacy_get_ext_tmds_info_from_combios() local
1419 if (rdev->flags & RADEON_IS_IGP) { radeon_legacy_get_ext_tmds_info_from_combios()
1420 i2c_bus = combios_setup_i2c_bus(rdev, DDC_MONID, 0, 0); radeon_legacy_get_ext_tmds_info_from_combios()
1421 tmds->i2c_bus = radeon_i2c_lookup(rdev, &i2c_bus); radeon_legacy_get_ext_tmds_info_from_combios()
1439 i2c_bus = combios_setup_i2c_bus(rdev, gpio, 0, 0); radeon_legacy_get_ext_tmds_info_from_combios()
1440 tmds->i2c_bus = radeon_i2c_lookup(rdev, &i2c_bus); radeon_legacy_get_ext_tmds_info_from_combios()
1454 struct radeon_device *rdev = dev->dev_private; radeon_get_legacy_connector_info_from_table() local
1458 rdev->mode_info.connector_table = radeon_connector_table; radeon_get_legacy_connector_info_from_table()
1459 if (rdev->mode_info.connector_table == CT_NONE) { radeon_get_legacy_connector_info_from_table()
1463 rdev->mode_info.connector_table = CT_POWERBOOK_VGA; radeon_get_legacy_connector_info_from_table()
1467 rdev->mode_info.connector_table = CT_POWERBOOK_INTERNAL; radeon_get_legacy_connector_info_from_table()
1474 rdev->mode_info.connector_table = CT_POWERBOOK_EXTERNAL; radeon_get_legacy_connector_info_from_table()
1477 rdev->mode_info.connector_table = CT_POWERBOOK_EXTERNAL; radeon_get_legacy_connector_info_from_table()
1483 rdev->mode_info.connector_table = CT_POWERBOOK_EXTERNAL; radeon_get_legacy_connector_info_from_table()
1491 rdev->mode_info.connector_table = CT_IBOOK; radeon_get_legacy_connector_info_from_table()
1494 rdev->mode_info.connector_table = CT_MAC_G4_SILVER; radeon_get_legacy_connector_info_from_table()
1497 rdev->mode_info.connector_table = CT_EMAC; radeon_get_legacy_connector_info_from_table()
1500 rdev->mode_info.connector_table = CT_MINI_INTERNAL; radeon_get_legacy_connector_info_from_table()
1503 rdev->mode_info.connector_table = CT_MINI_EXTERNAL; radeon_get_legacy_connector_info_from_table()
1507 rdev->mode_info.connector_table = CT_IMAC_G5_ISIGHT; radeon_get_legacy_connector_info_from_table()
1508 } else if ((rdev->pdev->device == 0x4a48) && radeon_get_legacy_connector_info_from_table()
1509 (rdev->pdev->subsystem_vendor == 0x1002) && radeon_get_legacy_connector_info_from_table()
1510 (rdev->pdev->subsystem_device == 0x4a48)) { radeon_get_legacy_connector_info_from_table()
1512 rdev->mode_info.connector_table = CT_MAC_X800; radeon_get_legacy_connector_info_from_table()
1515 (rdev->pdev->device == 0x4150) && radeon_get_legacy_connector_info_from_table()
1516 (rdev->pdev->subsystem_vendor == 0x1002) && radeon_get_legacy_connector_info_from_table()
1517 (rdev->pdev->subsystem_device == 0x4150)) { radeon_get_legacy_connector_info_from_table()
1519 rdev->mode_info.connector_table = CT_MAC_G5_9600; radeon_get_legacy_connector_info_from_table()
1520 } else if ((rdev->pdev->device == 0x4c66) && radeon_get_legacy_connector_info_from_table()
1521 (rdev->pdev->subsystem_vendor == 0x1002) && radeon_get_legacy_connector_info_from_table()
1522 (rdev->pdev->subsystem_device == 0x4c66)) { radeon_get_legacy_connector_info_from_table()
1524 rdev->mode_info.connector_table = CT_SAM440EP; radeon_get_legacy_connector_info_from_table()
1528 if (ASIC_IS_RN50(rdev)) radeon_get_legacy_connector_info_from_table()
1529 rdev->mode_info.connector_table = CT_RN50_POWER; radeon_get_legacy_connector_info_from_table()
1532 rdev->mode_info.connector_table = CT_GENERIC; radeon_get_legacy_connector_info_from_table()
1535 switch (rdev->mode_info.connector_table) { radeon_get_legacy_connector_info_from_table()
1538 rdev->mode_info.connector_table); radeon_get_legacy_connector_info_from_table()
1540 if (rdev->flags & RADEON_SINGLE_CRTC) { radeon_get_legacy_connector_info_from_table()
1542 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_VGA, 0, 0); radeon_get_legacy_connector_info_from_table()
1555 } else if (rdev->flags & RADEON_IS_MOBILITY) { radeon_get_legacy_connector_info_from_table()
1557 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_NONE_DETECTED, 0, 0); radeon_get_legacy_connector_info_from_table()
1572 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_VGA, 0, 0); radeon_get_legacy_connector_info_from_table()
1587 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_DVI, 0, 0); radeon_get_legacy_connector_info_from_table()
1608 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_VGA, 0, 0); radeon_get_legacy_connector_info_from_table()
1623 if (rdev->family != CHIP_R100 && rdev->family != CHIP_R200) { radeon_get_legacy_connector_info_from_table()
1642 rdev->mode_info.connector_table); radeon_get_legacy_connector_info_from_table()
1644 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_DVI, 0, 0); radeon_get_legacy_connector_info_from_table()
1656 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_VGA, 0, 0); radeon_get_legacy_connector_info_from_table()
1683 rdev->mode_info.connector_table); radeon_get_legacy_connector_info_from_table()
1685 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_DVI, 0, 0); radeon_get_legacy_connector_info_from_table()
1697 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_VGA, 0, 0); radeon_get_legacy_connector_info_from_table()
1732 rdev->mode_info.connector_table); radeon_get_legacy_connector_info_from_table()
1734 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_DVI, 0, 0); radeon_get_legacy_connector_info_from_table()
1746 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_VGA, 0, 0); radeon_get_legacy_connector_info_from_table()
1780 rdev->mode_info.connector_table); radeon_get_legacy_connector_info_from_table()
1782 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_DVI, 0, 0); radeon_get_legacy_connector_info_from_table()
1794 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_VGA, 0, 0); radeon_get_legacy_connector_info_from_table()
1821 rdev->mode_info.connector_table); radeon_get_legacy_connector_info_from_table()
1823 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_CRT2, 0, 0); radeon_get_legacy_connector_info_from_table()
1858 rdev->mode_info.connector_table); radeon_get_legacy_connector_info_from_table()
1860 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_CRT2, 0, 0); radeon_get_legacy_connector_info_from_table()
1894 rdev->mode_info.connector_table); radeon_get_legacy_connector_info_from_table()
1896 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_MONID, 0, 0); radeon_get_legacy_connector_info_from_table()
1908 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_DVI, 0, 0); radeon_get_legacy_connector_info_from_table()
1935 rdev->mode_info.connector_table); radeon_get_legacy_connector_info_from_table()
1937 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_VGA, 0, 0); radeon_get_legacy_connector_info_from_table()
1949 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_CRT2, 0, 0); radeon_get_legacy_connector_info_from_table()
1976 rdev->mode_info.connector_table); radeon_get_legacy_connector_info_from_table()
1978 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_VGA, 0, 0); radeon_get_legacy_connector_info_from_table()
1989 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_CRT2, 0, 0); radeon_get_legacy_connector_info_from_table()
2003 rdev->mode_info.connector_table); radeon_get_legacy_connector_info_from_table()
2005 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_DVI, 0, 0); radeon_get_legacy_connector_info_from_table()
2024 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_MONID, 0, 0); radeon_get_legacy_connector_info_from_table()
2045 rdev->mode_info.connector_table); radeon_get_legacy_connector_info_from_table()
2047 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_DVI, 0, 0); radeon_get_legacy_connector_info_from_table()
2066 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_VGA, 0, 0); radeon_get_legacy_connector_info_from_table()
2100 rdev->mode_info.connector_table); radeon_get_legacy_connector_info_from_table()
2102 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_NONE_DETECTED, 0, 0); radeon_get_legacy_connector_info_from_table()
2114 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_DVI, 0, 0); radeon_get_legacy_connector_info_from_table()
2133 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_VGA, 0, 0); radeon_get_legacy_connector_info_from_table()
2161 rdev->mode_info.connector_table); radeon_get_legacy_connector_info_from_table()
2163 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_DVI, 0, 0); radeon_get_legacy_connector_info_from_table()
2182 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_VGA, 0, 0); radeon_get_legacy_connector_info_from_table()
2209 rdev->mode_info.connector_table); radeon_get_legacy_connector_info_from_table()
2271 struct radeon_device *rdev = dev->dev_private; combios_check_dl_dvi() local
2274 if (rdev->flags & RADEON_IS_IGP) { combios_check_dl_dvi()
2306 struct radeon_device *rdev = dev->dev_private; radeon_get_legacy_connector_info_from_bios() local
2329 ddc_i2c = radeon_combios_get_i2c_info_from_table(rdev); radeon_get_legacy_connector_info_from_bios()
2331 ddc_i2c = combios_setup_i2c_bus(rdev, ddc_type, 0, 0); radeon_get_legacy_connector_info_from_bios()
2509 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_DVI, 0, 0); radeon_get_legacy_connector_info_from_bios()
2529 ddc_i2c = combios_setup_i2c_bus(rdev, DDC_VGA, 0, 0); radeon_get_legacy_connector_info_from_bios()
2545 if (rdev->flags & RADEON_IS_MOBILITY || rdev->flags & RADEON_IS_IGP) { radeon_get_legacy_connector_info_from_bios()
2564 combios_setup_i2c_bus(rdev, radeon_get_legacy_connector_info_from_bios()
2568 radeon_i2c_add(rdev, &ddc_i2c, "LCD"); radeon_get_legacy_connector_info_from_bios()
2572 combios_setup_i2c_bus(rdev, radeon_get_legacy_connector_info_from_bios()
2576 radeon_i2c_add(rdev, &ddc_i2c, "LCD"); radeon_get_legacy_connector_info_from_bios()
2580 combios_setup_i2c_bus(rdev, ddc_type, 0, 0); radeon_get_legacy_connector_info_from_bios()
2599 if (rdev->family != CHIP_R100 && rdev->family != CHIP_R200) { radeon_get_legacy_connector_info_from_bios()
2635 void radeon_combios_get_power_modes(struct radeon_device *rdev) radeon_combios_get_power_modes() argument
2637 struct drm_device *dev = rdev->ddev; radeon_combios_get_power_modes()
2643 rdev->pm.default_power_state_index = -1; radeon_combios_get_power_modes()
2646 rdev->pm.power_state = kzalloc(sizeof(struct radeon_power_state) * 2, GFP_KERNEL); radeon_combios_get_power_modes()
2647 if (rdev->pm.power_state) { radeon_combios_get_power_modes()
2649 rdev->pm.power_state[0].clock_info = radeon_combios_get_power_modes()
2651 rdev->pm.power_state[1].clock_info = radeon_combios_get_power_modes()
2653 if (!rdev->pm.power_state[0].clock_info || radeon_combios_get_power_modes()
2654 !rdev->pm.power_state[1].clock_info) radeon_combios_get_power_modes()
2692 i2c_bus = combios_setup_i2c_bus(rdev, gpio, 1 << clk_bit, 1 << data_bit); radeon_combios_get_power_modes()
2694 i2c_bus = combios_setup_i2c_bus(rdev, gpio, 0, 0); radeon_combios_get_power_modes()
2695 rdev->pm.i2c_bus = radeon_i2c_lookup(rdev, &i2c_bus); radeon_combios_get_power_modes()
2696 if (rdev->pm.i2c_bus) { radeon_combios_get_power_modes()
2701 i2c_new_device(&rdev->pm.i2c_bus->adapter, &info); radeon_combios_get_power_modes()
2711 i2c_bus = combios_setup_i2c_bus(rdev, DDC_MONID, 0, 0); radeon_combios_get_power_modes()
2712 rdev->pm.i2c_bus = radeon_i2c_lookup(rdev, &i2c_bus); radeon_combios_get_power_modes()
2713 if (rdev->pm.i2c_bus) { radeon_combios_get_power_modes()
2718 i2c_new_device(&rdev->pm.i2c_bus->adapter, &info); radeon_combios_get_power_modes()
2725 if (rdev->flags & RADEON_IS_MOBILITY) { radeon_combios_get_power_modes()
2731 rdev->pm.power_state[state_index].num_clock_modes = 1; radeon_combios_get_power_modes()
2732 rdev->pm.power_state[state_index].clock_info[0].mclk = RBIOS32(offset + 0x5 + 0x2); radeon_combios_get_power_modes()
2733 rdev->pm.power_state[state_index].clock_info[0].sclk = RBIOS32(offset + 0x5 + 0x6); radeon_combios_get_power_modes()
2734 if ((rdev->pm.power_state[state_index].clock_info[0].mclk == 0) || radeon_combios_get_power_modes()
2735 (rdev->pm.power_state[state_index].clock_info[0].sclk == 0)) radeon_combios_get_power_modes()
2737 rdev->pm.power_state[state_index].type = radeon_combios_get_power_modes()
2742 rdev->pm.power_state[state_index].misc = misc; radeon_combios_get_power_modes()
2743 rdev->pm.power_state[state_index].misc2 = misc2; radeon_combios_get_power_modes()
2745 rdev->pm.power_state[state_index].clock_info[0].voltage.type = VOLTAGE_GPIO; radeon_combios_get_power_modes()
2747 rdev->pm.power_state[state_index].clock_info[0].voltage.active_high = radeon_combios_get_power_modes()
2750 rdev->pm.power_state[state_index].clock_info[0].voltage.active_high = radeon_combios_get_power_modes()
2752 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio.valid = true; radeon_combios_get_power_modes()
2754 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio.reg = radeon_combios_get_power_modes()
2757 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio.mask = (1 << tmp); radeon_combios_get_power_modes()
2762 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio.reg = radeon_combios_get_power_modes()
2765 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio.mask = (1 << tmp); radeon_combios_get_power_modes()
2767 rdev->pm.power_state[state_index].clock_info[0].voltage.gpio.valid = false; radeon_combios_get_power_modes()
2772 rdev->pm.power_state[state_index].clock_info[0].voltage.delay = 0; radeon_combios_get_power_modes()
2775 rdev->pm.power_state[state_index].clock_info[0].voltage.delay = 33; radeon_combios_get_power_modes()
2778 rdev->pm.power_state[state_index].clock_info[0].voltage.delay = 66; radeon_combios_get_power_modes()
2781 rdev->pm.power_state[state_index].clock_info[0].voltage.delay = 99; radeon_combios_get_power_modes()
2784 rdev->pm.power_state[state_index].clock_info[0].voltage.delay = 132; radeon_combios_get_power_modes()
2788 rdev->pm.power_state[state_index].clock_info[0].voltage.type = VOLTAGE_NONE; radeon_combios_get_power_modes()
2790 rdev->pm.power_state[state_index].pcie_lanes = radeon_combios_get_power_modes()
2792 rdev->pm.power_state[state_index].flags = RADEON_PM_STATE_SINGLE_DISPLAY_ONLY; radeon_combios_get_power_modes()
2803 rdev->pm.power_state[state_index].type = radeon_combios_get_power_modes()
2805 rdev->pm.power_state[state_index].num_clock_modes = 1; radeon_combios_get_power_modes()
2806 rdev->pm.power_state[state_index].clock_info[0].mclk = rdev->clock.default_mclk; radeon_combios_get_power_modes()
2807 rdev->pm.power_state[state_index].clock_info[0].sclk = rdev->clock.default_sclk; radeon_combios_get_power_modes()
2808 rdev->pm.power_state[state_index].default_clock_mode = &rdev->pm.power_state[state_index].clock_info[0]; radeon_combios_get_power_modes()
2810 (rdev->pm.power_state[0].clock_info[0].voltage.type == VOLTAGE_GPIO)) radeon_combios_get_power_modes()
2811 rdev->pm.power_state[state_index].clock_info[0].voltage = radeon_combios_get_power_modes()
2812 rdev->pm.power_state[0].clock_info[0].voltage; radeon_combios_get_power_modes()
2814 rdev->pm.power_state[state_index].clock_info[0].voltage.type = VOLTAGE_NONE; radeon_combios_get_power_modes()
2815 rdev->pm.power_state[state_index].pcie_lanes = 16; radeon_combios_get_power_modes()
2816 rdev->pm.power_state[state_index].flags = 0; radeon_combios_get_power_modes()
2817 rdev->pm.default_power_state_index = state_index; radeon_combios_get_power_modes()
2818 rdev->pm.num_power_states = state_index + 1; radeon_combios_get_power_modes()
2820 rdev->pm.current_power_state_index = rdev->pm.default_power_state_index; radeon_combios_get_power_modes()
2821 rdev->pm.current_clock_mode_index = 0; radeon_combios_get_power_modes()
2825 rdev->pm.default_power_state_index = state_index; radeon_combios_get_power_modes()
2826 rdev->pm.num_power_states = 0; radeon_combios_get_power_modes()
2828 rdev->pm.current_power_state_index = rdev->pm.default_power_state_index; radeon_combios_get_power_modes()
2829 rdev->pm.current_clock_mode_index = 0; radeon_combios_get_power_modes()
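The mobility branch above maps a small selector onto a GPIO voltage switching delay of 0, 33, 66, 99 or 132 (the units and the exact field decoded from misc2 are not visible in these hits and are assumed here). A table-style C sketch of the five cases:

#include <stdio.h>

/* Mirrors the five delay cases visible above; how the selector is
 * extracted from misc2 is simplified and assumed. */
static int voltage_delay(unsigned sel)
{
	switch (sel) {
	case 1:  return 33;
	case 2:  return 66;
	case 3:  return 99;
	case 4:  return 132;
	default: return 0;
	}
}

int main(void)
{
	unsigned sel;

	for (sel = 0; sel <= 4; sel++)
		printf("sel %u -> delay %d\n", sel, voltage_delay(sel));
	return 0;
}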
2882 struct radeon_device *rdev = dev->dev_private; radeon_combios_external_tmds_setup() local
2893 if (rdev->flags & RADEON_IS_IGP) { radeon_combios_external_tmds_setup()
3012 struct radeon_device *rdev = dev->dev_private; combios_parse_mmio_table() local
3091 struct radeon_device *rdev = dev->dev_private; combios_parse_pll_table() local
3182 struct radeon_device *rdev = dev->dev_private; combios_parse_ram_reset_table() local
3193 if (ASIC_IS_R300(rdev)) combios_parse_ram_reset_table()
3229 struct radeon_device *rdev = dev->dev_private; combios_detect_ram() local
3260 struct radeon_device *rdev = dev->dev_private; combios_write_ram_size() local
3267 if (rdev->flags & RADEON_IS_IGP) combios_write_ram_size()
3277 if ((rdev->family < CHIP_R200) && combios_write_ram_size()
3278 !ASIC_IS_RN50(rdev)) combios_write_ram_size()
3289 if ((rdev->family < CHIP_R200) combios_write_ram_size()
3290 && !ASIC_IS_RN50(rdev)) { combios_write_ram_size()
3322 struct radeon_device *rdev = dev->dev_private; radeon_combios_asic_init() local
3326 if (rdev->bios == NULL) radeon_combios_asic_init()
3344 if (!(rdev->flags & RADEON_IS_IGP)) { radeon_combios_asic_init()
3369 if (rdev->family == CHIP_RS480 && radeon_combios_asic_init()
3370 rdev->pdev->subsystem_vendor == 0x103c && radeon_combios_asic_init()
3371 rdev->pdev->subsystem_device == 0x308b) radeon_combios_asic_init()
3377 if (rdev->family == CHIP_RS480 && radeon_combios_asic_init()
3378 rdev->pdev->subsystem_vendor == 0x103c && radeon_combios_asic_init()
3379 rdev->pdev->subsystem_device == 0x30a4) radeon_combios_asic_init()
3385 if (rdev->family == CHIP_RS480 && radeon_combios_asic_init()
3386 rdev->pdev->subsystem_vendor == 0x103c && radeon_combios_asic_init()
3387 rdev->pdev->subsystem_device == 0x30ae) radeon_combios_asic_init()
3393 if (rdev->family == CHIP_RS480 && radeon_combios_asic_init()
3394 rdev->pdev->subsystem_vendor == 0x103c && radeon_combios_asic_init()
3395 rdev->pdev->subsystem_device == 0x280a) radeon_combios_asic_init()
3407 struct radeon_device *rdev = dev->dev_private; radeon_combios_initialize_bios_scratch_regs() local
3432 struct radeon_device *rdev = dev->dev_private; radeon_combios_output_lock() local
3451 struct radeon_device *rdev = dev->dev_private; radeon_combios_connected_scratch_regs() local
3552 struct radeon_device *rdev = dev->dev_private; radeon_combios_encoder_crtc_scratch_regs() local
3587 struct radeon_device *rdev = dev->dev_private; radeon_combios_encoder_dpms_scratch_regs() local
H A D radeon_vm.c 56 * @rdev: radeon_device pointer
60 static unsigned radeon_vm_num_pdes(struct radeon_device *rdev) radeon_vm_num_pdes() argument
62 return rdev->vm_manager.max_pfn >> radeon_vm_block_size; radeon_vm_num_pdes()
68 * @rdev: radeon_device pointer
72 static unsigned radeon_vm_directory_size(struct radeon_device *rdev) radeon_vm_directory_size() argument
74 return RADEON_GPU_PAGE_ALIGN(radeon_vm_num_pdes(rdev) * 8); radeon_vm_directory_size()
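The two sizing helpers above derive the page-directory footprint from vm_manager.max_pfn: one PDE per 2^radeon_vm_block_size pages, eight bytes per PDE, rounded up to a GPU page. A minimal standalone C model of that arithmetic (the 4 KiB GPU page size matches the driver; the block size and max_pfn values are illustrative assumptions):

#include <stdio.h>
#include <stdint.h>

#define GPU_PAGE_SIZE 4096ull
#define GPU_PAGE_ALIGN(x) (((x) + GPU_PAGE_SIZE - 1) & ~(GPU_PAGE_SIZE - 1))

/* One page-directory entry covers 2^block_size GPU pages. */
static unsigned num_pdes(uint64_t max_pfn, unsigned block_size)
{
	return (unsigned)(max_pfn >> block_size);
}

int main(void)
{
	uint64_t max_pfn = 1ull << 20;   /* assumed: 4 GiB of GPU address space */
	unsigned block_size = 9;         /* assumed: 512 pages per PDE */
	uint64_t pd_bytes = GPU_PAGE_ALIGN((uint64_t)num_pdes(max_pfn, block_size) * 8);

	printf("%u PDEs -> %llu-byte page directory\n",
	       num_pdes(max_pfn, block_size), (unsigned long long)pd_bytes);
	return 0;
}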
80 * @rdev: radeon_device pointer
85 int radeon_vm_manager_init(struct radeon_device *rdev) radeon_vm_manager_init() argument
89 if (!rdev->vm_manager.enabled) { radeon_vm_manager_init()
90 r = radeon_asic_vm_init(rdev); radeon_vm_manager_init()
94 rdev->vm_manager.enabled = true; radeon_vm_manager_init()
102 * @rdev: radeon_device pointer
106 void radeon_vm_manager_fini(struct radeon_device *rdev) radeon_vm_manager_fini() argument
110 if (!rdev->vm_manager.enabled) radeon_vm_manager_fini()
114 radeon_fence_unref(&rdev->vm_manager.active[i]); radeon_vm_manager_fini()
115 radeon_asic_vm_fini(rdev); radeon_vm_manager_fini()
116 rdev->vm_manager.enabled = false; radeon_vm_manager_fini()
128 struct radeon_bo_list *radeon_vm_get_bos(struct radeon_device *rdev, radeon_vm_get_bos() argument
168 * @rdev: radeon_device pointer
177 struct radeon_fence *radeon_vm_grab_id(struct radeon_device *rdev, radeon_vm_grab_id() argument
188 vm_id->last_id_use == rdev->vm_manager.active[vm_id->id]) radeon_vm_grab_id()
195 for (i = 1; i < rdev->vm_manager.nvm; ++i) { radeon_vm_grab_id()
196 struct radeon_fence *fence = rdev->vm_manager.active[i]; radeon_vm_grab_id()
215 return rdev->vm_manager.active[choices[i]]; radeon_vm_grab_id()
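radeon_vm_grab_id reuses the VM's current id when its last-use fence is still the one registered in vm_manager.active[], and otherwise recycles another id, preferring free ones. A simplified userspace sketch of that selection; fence sequence numbers stand in for struct radeon_fence, and the real code additionally tracks two candidates and returns the fence to wait on:

#include <stdio.h>
#include <stdint.h>

#define NVM 8

/* 0 means "id never used"; otherwise the sequence number of the
 * fence that last used the id (larger = more recent). */
static uint64_t active[NVM];

/* Pick a VM id: a never-used one if possible, otherwise the least
 * recently used.  Id 0 is reserved, as in the driver. */
static unsigned grab_id(void)
{
	unsigned i, best = 1;

	for (i = 1; i < NVM; ++i) {
		if (active[i] == 0)
			return i;        /* free id, nothing to wait for */
		if (active[i] < active[best])
			best = i;        /* oldest fence seen so far */
	}
	return best;                     /* caller must wait on its fence */
}

int main(void)
{
	active[1] = 40; active[2] = 10; active[3] = 30;
	printf("grabbed id %u\n", grab_id());  /* prints 4: first unused slot */
	return 0;
}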
227 * @rdev: radeon_device pointer
236 void radeon_vm_flush(struct radeon_device *rdev, radeon_vm_flush() argument
250 radeon_ring_vm_flush(rdev, &rdev->ring[ring], radeon_vm_flush()
259 * @rdev: radeon_device pointer
268 void radeon_vm_fence(struct radeon_device *rdev, radeon_vm_fence() argument
274 radeon_fence_unref(&rdev->vm_manager.active[vm_id]); radeon_vm_fence()
275 rdev->vm_manager.active[vm_id] = radeon_fence_ref(fence); radeon_vm_fence()
309 * @rdev: radeon_device pointer
319 struct radeon_bo_va *radeon_vm_bo_add(struct radeon_device *rdev, radeon_vm_bo_add() argument
348 * @rdev: radeon_device pointer
359 static void radeon_vm_set_pages(struct radeon_device *rdev, radeon_vm_set_pages() argument
368 uint64_t src = rdev->gart.table_addr + (addr >> 12) * 8; radeon_vm_set_pages()
369 radeon_asic_vm_copy_pages(rdev, ib, pe, src, count); radeon_vm_set_pages()
372 radeon_asic_vm_write_pages(rdev, ib, pe, addr, radeon_vm_set_pages()
376 radeon_asic_vm_set_pages(rdev, ib, pe, addr, radeon_vm_set_pages()
384 * @rdev: radeon_device pointer
387 static int radeon_vm_clear_bo(struct radeon_device *rdev, radeon_vm_clear_bo() argument
406 r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, 256); radeon_vm_clear_bo()
412 radeon_vm_set_pages(rdev, &ib, addr, 0, entries, 0, 0); radeon_vm_clear_bo()
413 radeon_asic_vm_pad_ib(rdev, &ib); radeon_vm_clear_bo()
416 r = radeon_ib_schedule(rdev, &ib, NULL, false); radeon_vm_clear_bo()
424 radeon_ib_free(rdev, &ib); radeon_vm_clear_bo()
434 * @rdev: radeon_device pointer
445 int radeon_vm_bo_set_addr(struct radeon_device *rdev, radeon_vm_bo_set_addr() argument
465 if (last_pfn >= rdev->vm_manager.max_pfn) { radeon_vm_bo_set_addr()
466 dev_err(rdev->dev, "va above limit (0x%08X >= 0x%08X)\n", radeon_vm_bo_set_addr()
467 last_pfn, rdev->vm_manager.max_pfn); radeon_vm_bo_set_addr()
486 dev_err(rdev->dev, "bo %p va 0x%010Lx conflict with " radeon_vm_bo_set_addr()
532 BUG_ON(eoffset >= radeon_vm_num_pdes(rdev)); radeon_vm_bo_set_addr()
549 r = radeon_bo_create(rdev, RADEON_VM_PTE_COUNT * 8, radeon_vm_bo_set_addr()
556 r = radeon_vm_clear_bo(rdev, pt); radeon_vm_bo_set_addr()
587 * @rdev: radeon_device pointer
594 uint64_t radeon_vm_map_gart(struct radeon_device *rdev, uint64_t addr) radeon_vm_map_gart() argument
599 result = rdev->gart.pages_entry[addr >> RADEON_GPU_PAGE_SHIFT]; radeon_vm_map_gart()
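radeon_vm_map_gart turns a GART address into the bus address that belongs in a GPU page-table entry by indexing gart.pages_entry with the page number. A toy model of the lookup, assuming a 4 KiB GPU page and a prefilled entry table; masking off the low page bits mirrors what the driver does right after the hit above:

#include <stdio.h>
#include <stdint.h>

#define GPU_PAGE_SHIFT 12
#define GPU_PAGE_MASK  0xFFFull

static uint64_t pages_entry[4];  /* stand-in for rdev->gart.pages_entry */

static uint64_t map_gart(uint64_t addr)
{
	/* Index the per-page entry array, then drop the low bits where
	 * flag bits may live, keeping the page-aligned bus address. */
	uint64_t result = pages_entry[addr >> GPU_PAGE_SHIFT];
	return result & ~GPU_PAGE_MASK;
}

int main(void)
{
	pages_entry[2] = 0xABCDE007ull;  /* page 2 backed at bus 0xABCDE000 */
	printf("0x%llx\n", (unsigned long long)map_gart(0x2444));
	return 0;
}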
628 * @rdev: radeon_device pointer
639 int radeon_vm_update_page_directory(struct radeon_device *rdev, radeon_vm_update_page_directory() argument
660 r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, ndw * 4); radeon_vm_update_page_directory()
683 radeon_vm_set_pages(rdev, &ib, last_pde, radeon_vm_update_page_directory()
697 radeon_vm_set_pages(rdev, &ib, last_pde, last_pt, count, radeon_vm_update_page_directory()
701 radeon_asic_vm_pad_ib(rdev, &ib); radeon_vm_update_page_directory()
703 radeon_sync_resv(rdev, &ib.sync, pd->tbo.resv, true); radeon_vm_update_page_directory()
705 r = radeon_ib_schedule(rdev, &ib, NULL, false); radeon_vm_update_page_directory()
707 radeon_ib_free(rdev, &ib); radeon_vm_update_page_directory()
713 radeon_ib_free(rdev, &ib); radeon_vm_update_page_directory()
721 * @rdev: radeon_device pointer
730 static void radeon_vm_frag_ptes(struct radeon_device *rdev, radeon_vm_frag_ptes() argument
755 uint64_t frag_flags = ((rdev->family == CHIP_CAYMAN) || radeon_vm_frag_ptes()
756 (rdev->family == CHIP_ARUBA)) ? radeon_vm_frag_ptes()
758 uint64_t frag_align = ((rdev->family == CHIP_CAYMAN) || radeon_vm_frag_ptes()
759 (rdev->family == CHIP_ARUBA)) ? 0x200 : 0x80; radeon_vm_frag_ptes()
771 radeon_vm_set_pages(rdev, ib, pe_start, addr, count, radeon_vm_frag_ptes()
779 radeon_vm_set_pages(rdev, ib, pe_start, addr, count, radeon_vm_frag_ptes()
786 radeon_vm_set_pages(rdev, ib, frag_start, addr, count, radeon_vm_frag_ptes()
793 radeon_vm_set_pages(rdev, ib, frag_end, addr, count, radeon_vm_frag_ptes()
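radeon_vm_frag_ptes splits a PTE update into an unaligned head, a middle written with hardware fragment flags, and a tail; the hits above show the fragment alignment is 0x200 on Cayman/Aruba and 0x80 elsewhere. A standalone sketch of just the range-splitting arithmetic (byte offsets into the page table; the example range is made up):

#include <stdio.h>
#include <stdint.h>
#include <inttypes.h>

static void split_frags(uint64_t pe_start, uint64_t pe_end, uint64_t frag_align)
{
	uint64_t frag_start = (pe_start + frag_align - 1) & ~(frag_align - 1);
	uint64_t frag_end = pe_end & ~(frag_align - 1);

	if (frag_start >= frag_end) {
		/* Range too small for fragments: one plain write. */
		printf("plain [%#" PRIx64 ", %#" PRIx64 ")\n", pe_start, pe_end);
		return;
	}
	if (pe_start != frag_start)
		printf("head  [%#" PRIx64 ", %#" PRIx64 ")\n", pe_start, frag_start);
	printf("frags [%#" PRIx64 ", %#" PRIx64 ") written with fragment flags\n",
	       frag_start, frag_end);
	if (frag_end != pe_end)
		printf("tail  [%#" PRIx64 ", %#" PRIx64 ")\n", frag_end, pe_end);
}

int main(void)
{
	split_frags(0x30, 0x310, 0x80);  /* non-Cayman alignment from above */
	return 0;
}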
801 * @rdev: radeon_device pointer
812 static int radeon_vm_update_ptes(struct radeon_device *rdev, radeon_vm_update_ptes() argument
831 radeon_sync_resv(rdev, &ib->sync, pt->tbo.resv, true); radeon_vm_update_ptes()
847 radeon_vm_frag_ptes(rdev, ib, last_pte, radeon_vm_update_ptes()
864 radeon_vm_frag_ptes(rdev, ib, last_pte, radeon_vm_update_ptes()
900 * @rdev: radeon_device pointer
910 int radeon_vm_bo_update(struct radeon_device *rdev, radeon_vm_bo_update() argument
922 dev_err(rdev->dev, "bo %p don't has a mapping in vm %p\n", radeon_vm_bo_update()
957 addr += rdev->vm_manager.vram_base_offset; radeon_vm_bo_update()
998 r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, ndw * 4); radeon_vm_bo_update()
1010 r = radeon_vm_update_ptes(rdev, vm, &ib, bo_va->it.start, radeon_vm_bo_update()
1014 radeon_ib_free(rdev, &ib); radeon_vm_bo_update()
1018 radeon_asic_vm_pad_ib(rdev, &ib); radeon_vm_bo_update()
1021 r = radeon_ib_schedule(rdev, &ib, NULL, false); radeon_vm_bo_update()
1023 radeon_ib_free(rdev, &ib); radeon_vm_bo_update()
1030 radeon_ib_free(rdev, &ib); radeon_vm_bo_update()
1038 * @rdev: radeon_device pointer
1046 int radeon_vm_clear_freed(struct radeon_device *rdev, radeon_vm_clear_freed() argument
1058 r = radeon_vm_bo_update(rdev, bo_va, NULL); radeon_vm_clear_freed()
1076 * @rdev: radeon_device pointer
1084 int radeon_vm_clear_invalids(struct radeon_device *rdev, radeon_vm_clear_invalids() argument
1096 r = radeon_vm_bo_update(rdev, bo_va, NULL); radeon_vm_clear_invalids()
1110 * @rdev: radeon_device pointer
1117 void radeon_vm_bo_rmv(struct radeon_device *rdev, radeon_vm_bo_rmv() argument
1145 * @rdev: radeon_device pointer
1151 void radeon_vm_bo_invalidate(struct radeon_device *rdev, radeon_vm_bo_invalidate() argument
1168 * @rdev: radeon_device pointer
1173 int radeon_vm_init(struct radeon_device *rdev, struct radeon_vm *vm) radeon_vm_init() argument
1193 pd_size = radeon_vm_directory_size(rdev); radeon_vm_init()
1194 pd_entries = radeon_vm_num_pdes(rdev); radeon_vm_init()
1204 r = radeon_bo_create(rdev, pd_size, align, true, radeon_vm_init()
1210 r = radeon_vm_clear_bo(rdev, vm->page_directory); radeon_vm_init()
1223 * @rdev: radeon_device pointer
1229 void radeon_vm_fini(struct radeon_device *rdev, struct radeon_vm *vm) radeon_vm_fini() argument
1235 dev_err(rdev->dev, "still active bo inside vm\n"); radeon_vm_fini()
1253 for (i = 0; i < radeon_vm_num_pdes(rdev); i++) radeon_vm_fini()
H A D radeon_legacy_encoders.c 49 struct radeon_device *rdev = dev->dev_private; radeon_legacy_lvds_update() local
61 if (rdev->is_atom_bios) { radeon_legacy_lvds_update()
77 if ((rdev->mode_info.connector_table == CT_IBOOK) || radeon_legacy_lvds_update()
78 (rdev->mode_info.connector_table == CT_POWERBOOK_EXTERNAL) || radeon_legacy_lvds_update()
79 (rdev->mode_info.connector_table == CT_POWERBOOK_INTERNAL) || radeon_legacy_lvds_update()
80 (rdev->mode_info.connector_table == CT_POWERBOOK_VGA)) radeon_legacy_lvds_update()
128 if (rdev->is_atom_bios) radeon_legacy_lvds_update()
137 struct radeon_device *rdev = encoder->dev->dev_private; radeon_legacy_lvds_dpms() local
142 if (rdev->is_atom_bios) { radeon_legacy_lvds_dpms()
156 struct radeon_device *rdev = encoder->dev->dev_private; radeon_legacy_lvds_prepare() local
158 if (rdev->is_atom_bios) radeon_legacy_lvds_prepare()
167 struct radeon_device *rdev = encoder->dev->dev_private; radeon_legacy_lvds_commit() local
170 if (rdev->is_atom_bios) radeon_legacy_lvds_commit()
181 struct radeon_device *rdev = dev->dev_private; radeon_legacy_lvds_mode_set() local
192 if (rdev->is_atom_bios) { radeon_legacy_lvds_mode_set()
217 if (ASIC_IS_R300(rdev)) radeon_legacy_lvds_mode_set()
221 if (ASIC_IS_R300(rdev)) { radeon_legacy_lvds_mode_set()
227 if (ASIC_IS_R300(rdev)) radeon_legacy_lvds_mode_set()
237 if (rdev->family == CHIP_RV410) radeon_legacy_lvds_mode_set()
240 if (rdev->is_atom_bios) radeon_legacy_lvds_mode_set()
276 struct radeon_device *rdev = dev->dev_private; radeon_legacy_get_backlight_level() local
289 struct radeon_device *rdev = dev->dev_private; radeon_legacy_set_backlight_level() local
293 if (rdev->is_atom_bios) { radeon_legacy_set_backlight_level()
350 struct radeon_device *rdev = dev->dev_private; radeon_legacy_backlight_get_brightness() local
368 struct radeon_device *rdev = dev->dev_private; radeon_legacy_backlight_init() local
418 pdata->negative = (rdev->family != CHIP_RV200 && radeon_legacy_backlight_init()
419 rdev->family != CHIP_RV250 && radeon_legacy_backlight_init()
420 rdev->family != CHIP_RV280 && radeon_legacy_backlight_init()
421 rdev->family != CHIP_RV350); radeon_legacy_backlight_init()
431 if (rdev->is_atom_bios) { radeon_legacy_backlight_init()
444 rdev->mode_info.bl_encoder = radeon_encoder; radeon_legacy_backlight_init()
456 struct radeon_device *rdev = dev->dev_private; radeon_legacy_backlight_exit() local
462 if (rdev->is_atom_bios) { radeon_legacy_backlight_exit()
515 struct radeon_device *rdev = dev->dev_private; radeon_legacy_primary_dac_dpms() local
542 if (!(rdev->flags & RADEON_SINGLE_CRTC)) radeon_legacy_primary_dac_dpms()
547 if (rdev->is_atom_bios) radeon_legacy_primary_dac_dpms()
556 struct radeon_device *rdev = encoder->dev->dev_private; radeon_legacy_primary_dac_prepare() local
558 if (rdev->is_atom_bios) radeon_legacy_primary_dac_prepare()
567 struct radeon_device *rdev = encoder->dev->dev_private; radeon_legacy_primary_dac_commit() local
571 if (rdev->is_atom_bios) radeon_legacy_primary_dac_commit()
582 struct radeon_device *rdev = dev->dev_private; radeon_legacy_primary_dac_mode_set() local
590 if (rdev->family == CHIP_R200 || ASIC_IS_R300(rdev)) { radeon_legacy_primary_dac_mode_set()
599 if (rdev->family == CHIP_R200 || ASIC_IS_R300(rdev)) { radeon_legacy_primary_dac_mode_set()
628 if (rdev->is_atom_bios) radeon_legacy_primary_dac_mode_set()
638 struct radeon_device *rdev = dev->dev_private; radeon_legacy_primary_dac_detect() local
648 if (ASIC_IS_RN50(rdev)) { radeon_legacy_primary_dac_detect()
674 if (ASIC_IS_R300(rdev)) radeon_legacy_primary_dac_detect()
676 else if (ASIC_IS_RV100(rdev)) radeon_legacy_primary_dac_detect()
727 struct radeon_device *rdev = dev->dev_private; radeon_legacy_tmds_int_dpms() local
744 if (rdev->is_atom_bios) radeon_legacy_tmds_int_dpms()
753 struct radeon_device *rdev = encoder->dev->dev_private; radeon_legacy_tmds_int_prepare() local
755 if (rdev->is_atom_bios) radeon_legacy_tmds_int_prepare()
764 struct radeon_device *rdev = encoder->dev->dev_private; radeon_legacy_tmds_int_commit() local
768 if (rdev->is_atom_bios) radeon_legacy_tmds_int_commit()
779 struct radeon_device *rdev = dev->dev_private; radeon_legacy_tmds_int_mode_set() local
789 if (rdev->family == CHIP_RV280) { radeon_legacy_tmds_int_mode_set()
808 if (ASIC_IS_R300(rdev) || (rdev->family == CHIP_RV280)) { radeon_legacy_tmds_int_mode_set()
821 if (rdev->family == CHIP_R200 || radeon_legacy_tmds_int_mode_set()
822 rdev->family == CHIP_R100 || radeon_legacy_tmds_int_mode_set()
823 ASIC_IS_R300(rdev)) radeon_legacy_tmds_int_mode_set()
848 if (ASIC_IS_R300(rdev) || rdev->family == CHIP_R200) { radeon_legacy_tmds_int_mode_set()
857 if (ASIC_IS_R300(rdev) || rdev->family == CHIP_R200) { radeon_legacy_tmds_int_mode_set()
868 if (rdev->is_atom_bios) radeon_legacy_tmds_int_mode_set()
891 struct radeon_device *rdev = dev->dev_private; radeon_legacy_tmds_ext_dpms() local
910 if (rdev->is_atom_bios) radeon_legacy_tmds_ext_dpms()
919 struct radeon_device *rdev = encoder->dev->dev_private; radeon_legacy_tmds_ext_prepare() local
921 if (rdev->is_atom_bios) radeon_legacy_tmds_ext_prepare()
930 struct radeon_device *rdev = encoder->dev->dev_private; radeon_legacy_tmds_ext_commit() local
933 if (rdev->is_atom_bios) radeon_legacy_tmds_ext_commit()
944 struct radeon_device *rdev = dev->dev_private; radeon_legacy_tmds_ext_mode_set() local
951 if (rdev->is_atom_bios) { radeon_legacy_tmds_ext_mode_set()
968 if (ASIC_IS_R300(rdev)) { radeon_legacy_tmds_ext_mode_set()
984 if ((rdev->family == CHIP_R200) || ASIC_IS_R300(rdev)) { radeon_legacy_tmds_ext_mode_set()
993 if ((rdev->family == CHIP_R200) || ASIC_IS_R300(rdev)) { radeon_legacy_tmds_ext_mode_set()
1002 if (rdev->is_atom_bios) radeon_legacy_tmds_ext_mode_set()
1034 struct radeon_device *rdev = dev->dev_private; radeon_legacy_tv_dac_dpms() local
1043 if (rdev->family == CHIP_R200) radeon_legacy_tv_dac_dpms()
1055 if (rdev->family == CHIP_R200) { radeon_legacy_tv_dac_dpms()
1063 if (rdev->family == CHIP_R420 || radeon_legacy_tv_dac_dpms()
1064 rdev->family == CHIP_R423 || radeon_legacy_tv_dac_dpms()
1065 rdev->family == CHIP_RV410) radeon_legacy_tv_dac_dpms()
1080 if (rdev->family == CHIP_R200) radeon_legacy_tv_dac_dpms()
1088 if (rdev->family == CHIP_R420 || radeon_legacy_tv_dac_dpms()
1089 rdev->family == CHIP_R423 || radeon_legacy_tv_dac_dpms()
1090 rdev->family == CHIP_RV410) radeon_legacy_tv_dac_dpms()
1104 if (rdev->family == CHIP_R200) { radeon_legacy_tv_dac_dpms()
1110 else if (!(rdev->flags & RADEON_SINGLE_CRTC)) radeon_legacy_tv_dac_dpms()
1115 if (rdev->is_atom_bios) radeon_legacy_tv_dac_dpms()
1124 struct radeon_device *rdev = encoder->dev->dev_private; radeon_legacy_tv_dac_prepare() local
1126 if (rdev->is_atom_bios) radeon_legacy_tv_dac_prepare()
1135 struct radeon_device *rdev = encoder->dev->dev_private; radeon_legacy_tv_dac_commit() local
1139 if (rdev->is_atom_bios) radeon_legacy_tv_dac_commit()
1150 struct radeon_device *rdev = dev->dev_private; radeon_legacy_tv_dac_mode_set() local
1162 if (rdev->family != CHIP_R200) { radeon_legacy_tv_dac_mode_set()
1164 if (rdev->family == CHIP_R420 || radeon_legacy_tv_dac_mode_set()
1165 rdev->family == CHIP_R423 || radeon_legacy_tv_dac_mode_set()
1166 rdev->family == CHIP_RV410) { radeon_legacy_tv_dac_mode_set()
1206 if (ASIC_IS_R300(rdev)) { radeon_legacy_tv_dac_mode_set()
1209 } else if (rdev->family != CHIP_R200) radeon_legacy_tv_dac_mode_set()
1211 else if (rdev->family == CHIP_R200) radeon_legacy_tv_dac_mode_set()
1214 if (rdev->family >= CHIP_R200) radeon_legacy_tv_dac_mode_set()
1224 if (ASIC_IS_R300(rdev)) radeon_legacy_tv_dac_mode_set()
1229 if (ASIC_IS_R300(rdev)) { radeon_legacy_tv_dac_mode_set()
1234 if (rdev->family >= CHIP_R200) { radeon_legacy_tv_dac_mode_set()
1240 if (ASIC_IS_R300(rdev)) { radeon_legacy_tv_dac_mode_set()
1244 if (rdev->family >= CHIP_R200) { radeon_legacy_tv_dac_mode_set()
1256 if (ASIC_IS_R300(rdev)) { radeon_legacy_tv_dac_mode_set()
1259 } else if (rdev->family == CHIP_R200) { radeon_legacy_tv_dac_mode_set()
1265 if (ASIC_IS_R300(rdev)) { radeon_legacy_tv_dac_mode_set()
1268 } else if (rdev->family == CHIP_R200) { radeon_legacy_tv_dac_mode_set()
1278 if (ASIC_IS_R300(rdev)) { radeon_legacy_tv_dac_mode_set()
1281 } else if (rdev->family != CHIP_R200) radeon_legacy_tv_dac_mode_set()
1283 else if (rdev->family == CHIP_R200) radeon_legacy_tv_dac_mode_set()
1286 if (rdev->family >= CHIP_R200) radeon_legacy_tv_dac_mode_set()
1292 if (rdev->is_atom_bios) radeon_legacy_tv_dac_mode_set()
1303 struct radeon_device *rdev = dev->dev_private; r300_legacy_tv_detect() local
1374 struct radeon_device *rdev = dev->dev_private; radeon_legacy_tv_detect() local
1379 if (ASIC_IS_R300(rdev)) radeon_legacy_tv_detect()
1438 struct radeon_device *rdev = dev->dev_private; radeon_legacy_ext_dac_detect() local
1528 struct radeon_device *rdev = dev->dev_private; radeon_legacy_tv_dac_detect() local
1569 if (rdev->family == CHIP_R200) { radeon_legacy_tv_dac_detect()
1578 if (rdev->flags & RADEON_SINGLE_CRTC) { radeon_legacy_tv_dac_detect()
1581 if (ASIC_IS_R300(rdev)) { radeon_legacy_tv_dac_detect()
1597 if (rdev->flags & RADEON_SINGLE_CRTC) { radeon_legacy_tv_dac_detect()
1606 if (ASIC_IS_R300(rdev)) { radeon_legacy_tv_dac_detect()
1632 if (ASIC_IS_R300(rdev)) radeon_legacy_tv_dac_detect()
1644 if (ASIC_IS_R300(rdev)) { radeon_legacy_tv_dac_detect()
1657 if (rdev->flags & RADEON_SINGLE_CRTC) { radeon_legacy_tv_dac_detect()
1661 if (ASIC_IS_R300(rdev)) { radeon_legacy_tv_dac_detect()
1694 struct radeon_device *rdev = dev->dev_private; radeon_legacy_get_tmds_info() local
1703 if (rdev->is_atom_bios) radeon_legacy_get_tmds_info()
1717 struct radeon_device *rdev = dev->dev_private; radeon_legacy_get_ext_tmds_info() local
1721 if (rdev->is_atom_bios) radeon_legacy_get_ext_tmds_info()
1740 struct radeon_device *rdev = dev->dev_private; radeon_add_legacy_encoder() local
1760 if (rdev->flags & RADEON_SINGLE_CRTC) radeon_add_legacy_encoder()
1777 if (rdev->is_atom_bios) radeon_add_legacy_encoder()
1791 if (rdev->is_atom_bios) radeon_add_legacy_encoder()
1799 if (rdev->is_atom_bios) radeon_add_legacy_encoder()
1807 if (!rdev->is_atom_bios) radeon_add_legacy_encoder()
H A D si_dma.c 30 u32 si_gpu_check_soft_reset(struct radeon_device *rdev);
35 * @rdev: radeon_device pointer
41 bool si_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) si_dma_is_lockup() argument
43 u32 reset_mask = si_gpu_check_soft_reset(rdev); si_dma_is_lockup()
52 radeon_ring_lockup_update(rdev, ring); si_dma_is_lockup()
55 return radeon_ring_test_lockup(rdev, ring); si_dma_is_lockup()
61 * @rdev: radeon_device pointer
69 void si_dma_vm_copy_pages(struct radeon_device *rdev, si_dma_vm_copy_pages() argument
95 * @rdev: radeon_device pointer
105 void si_dma_vm_write_pages(struct radeon_device *rdev, si_dma_vm_write_pages() argument
125 value = radeon_vm_map_gart(rdev, addr); si_dma_vm_write_pages()
142 * @rdev: radeon_device pointer
152 void si_dma_vm_set_pages(struct radeon_device *rdev, si_dma_vm_set_pages() argument
187 void si_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring, si_dma_vm_flush() argument
221 * @rdev: radeon_device pointer
231 struct radeon_fence *si_copy_dma(struct radeon_device *rdev, si_copy_dma() argument
238 int ring_index = rdev->asic->copy.dma_ring_index; si_copy_dma()
239 struct radeon_ring *ring = &rdev->ring[ring_index]; si_copy_dma()
248 r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11); si_copy_dma()
251 radeon_sync_free(rdev, &sync, NULL); si_copy_dma()
255 radeon_sync_resv(rdev, &sync, resv, false); si_copy_dma()
256 radeon_sync_rings(rdev, &sync, ring->idx); si_copy_dma()
272 r = radeon_fence_emit(rdev, &fence, ring->idx); si_copy_dma()
274 radeon_ring_unlock_undo(rdev, ring); si_copy_dma()
275 radeon_sync_free(rdev, &sync, NULL); si_copy_dma()
279 radeon_ring_unlock_commit(rdev, ring, false); si_copy_dma()
280 radeon_sync_free(rdev, &sync, fence); si_copy_dma()
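si_copy_dma sizes its ring reservation from the packet count: each SI DMA copy packet costs 5 dwords, plus 11 dwords of overhead for sync and the fence, matching the radeon_ring_lock call above. A standalone model of the packetization loop; the 0xFFFFF per-packet byte limit is the SI DMA chunk size assumed here:

#include <stdio.h>

#define MAX_COPY_BYTES 0xFFFFFull  /* assumed per-packet limit on the SI DMA ring */

int main(void)
{
	unsigned long long size = 5ull * 1024 * 1024;  /* example: 5 MiB copy */
	unsigned num_loops = (unsigned)((size + MAX_COPY_BYTES - 1) / MAX_COPY_BYTES);
	unsigned ring_dw = num_loops * 5 + 11;  /* mirrors "num_loops * 5 + 11" above */

	printf("%u packets, %u ring dwords\n", num_loops, ring_dw);
	while (size) {
		unsigned long long cur = size > MAX_COPY_BYTES ? MAX_COPY_BYTES : size;
		/* The driver emits one COPY packet moving 'cur' bytes here. */
		size -= cur;
	}
	return 0;
}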
H A D dce6_afmt.c 32 u32 dce6_endpoint_rreg(struct radeon_device *rdev, dce6_endpoint_rreg() argument
38 spin_lock_irqsave(&rdev->end_idx_lock, flags); dce6_endpoint_rreg()
41 spin_unlock_irqrestore(&rdev->end_idx_lock, flags); dce6_endpoint_rreg()
46 void dce6_endpoint_wreg(struct radeon_device *rdev, dce6_endpoint_wreg() argument
51 spin_lock_irqsave(&rdev->end_idx_lock, flags); dce6_endpoint_wreg()
52 if (ASIC_IS_DCE8(rdev)) dce6_endpoint_wreg()
58 spin_unlock_irqrestore(&rdev->end_idx_lock, flags); dce6_endpoint_wreg()
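dce6_endpoint_rreg/wreg implement the classic indexed-register pair: under rdev->end_idx_lock, the endpoint index is written to one register and the payload is then read or written through a data register. A userspace model of the pattern; the register file, offsets and locking primitive are stand-ins, not the driver's:

#include <stdio.h>
#include <stdint.h>
#include <pthread.h>

#define REG_INDEX 0  /* invented offsets standing in for the endpoint regs */
#define REG_DATA  1

static uint32_t mmio[2];
static uint32_t endpoint_store[16];  /* what the fake "hardware" holds */
static pthread_mutex_t idx_lock = PTHREAD_MUTEX_INITIALIZER;

static void wreg(unsigned reg, uint32_t v)
{
	mmio[reg] = v;
	if (reg == REG_DATA)
		endpoint_store[mmio[REG_INDEX] & 15] = v;
}

static uint32_t rreg(unsigned reg)
{
	return reg == REG_DATA ? endpoint_store[mmio[REG_INDEX] & 15] : mmio[reg];
}

/* The index write and the data access must form one atomic unit,
 * hence the lock around both steps. */
static uint32_t endpoint_rreg(uint32_t ep_reg)
{
	uint32_t v;

	pthread_mutex_lock(&idx_lock);
	wreg(REG_INDEX, ep_reg);
	v = rreg(REG_DATA);
	pthread_mutex_unlock(&idx_lock);
	return v;
}

static void endpoint_wreg(uint32_t ep_reg, uint32_t v)
{
	pthread_mutex_lock(&idx_lock);
	wreg(REG_INDEX, ep_reg);
	wreg(REG_DATA, v);
	pthread_mutex_unlock(&idx_lock);
}

int main(void)
{
	endpoint_wreg(3, 0xdead);
	printf("0x%x\n", endpoint_rreg(3));  /* prints 0xdead */
	return 0;
}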
61 static void dce6_afmt_get_connected_pins(struct radeon_device *rdev) dce6_afmt_get_connected_pins() argument
66 for (i = 0; i < rdev->audio.num_pins; i++) { dce6_afmt_get_connected_pins()
67 offset = rdev->audio.pin[i].offset; dce6_afmt_get_connected_pins()
71 rdev->audio.pin[i].connected = false; dce6_afmt_get_connected_pins()
73 rdev->audio.pin[i].connected = true; dce6_afmt_get_connected_pins()
77 struct r600_audio_pin *dce6_audio_get_pin(struct radeon_device *rdev) dce6_audio_get_pin() argument
85 dce6_afmt_get_connected_pins(rdev); dce6_audio_get_pin()
87 for (i = 0; i < rdev->audio.num_pins; i++) { dce6_audio_get_pin()
88 if (rdev->audio.pin[i].connected) { dce6_audio_get_pin()
89 pin = &rdev->audio.pin[i]; dce6_audio_get_pin()
92 list_for_each_entry(encoder, &rdev->ddev->mode_config.encoder_list, head) { dce6_audio_get_pin()
112 struct radeon_device *rdev = encoder->dev->dev_private; dce6_afmt_select_pin() local
127 struct radeon_device *rdev = encoder->dev->dev_private; dce6_afmt_write_latency_fields() local
155 struct radeon_device *rdev = encoder->dev->dev_private; dce6_afmt_hdmi_write_speaker_allocation() local
180 struct radeon_device *rdev = encoder->dev->dev_private; dce6_afmt_dp_write_speaker_allocation() local
208 struct radeon_device *rdev = encoder->dev->dev_private; dce6_afmt_write_sad_regs() local
257 void dce6_audio_enable(struct radeon_device *rdev, dce6_audio_enable() argument
268 void dce6_hdmi_audio_set_dto(struct radeon_device *rdev, dce6_hdmi_audio_set_dto() argument
287 void dce6_dp_audio_set_dto(struct radeon_device *rdev, dce6_dp_audio_set_dto() argument
303 if (ASIC_IS_DCE8(rdev)) { dce6_dp_audio_set_dto()
H A D radeon_audio.h 31 radeon_audio_endpoint_rreg(rdev, (block), (reg))
33 radeon_audio_endpoint_wreg(rdev, (block), (reg), (v))
37 u32 (*endpoint_rreg)(struct radeon_device *rdev, u32 offset, u32 reg);
38 void (*endpoint_wreg)(struct radeon_device *rdev,
40 void (*enable)(struct radeon_device *rdev,
47 struct r600_audio_pin* (*get_pin)(struct radeon_device *rdev);
54 void (*set_dto)(struct radeon_device *rdev,
60 void (*set_avi_packet)(struct radeon_device *rdev, u32 offset,
69 int radeon_audio_init(struct radeon_device *rdev);
73 u32 radeon_audio_endpoint_rreg(struct radeon_device *rdev,
75 void radeon_audio_endpoint_wreg(struct radeon_device *rdev,
78 void radeon_audio_fini(struct radeon_device *rdev);
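The declarations above form an ops table: radeon_audio_init picks one implementation per display-engine generation, and every caller dispatches through the function pointers (endpoint_rreg/wreg, enable, get_pin, set_dto, ...) instead of branching on the family. A minimal sketch of that pattern with invented names:

#include <stdio.h>

struct audio_funcs {
	void (*enable)(int pin);
	int  (*get_pin)(void);
};

static void dce6_enable(int pin) { printf("dce6 enable pin %d\n", pin); }
static int  dce6_get_pin(void)   { return 1; }

static void dce4_enable(int pin) { printf("dce4 enable pin %d\n", pin); }
static int  dce4_get_pin(void)   { return 0; }

static const struct audio_funcs dce6_funcs = { dce6_enable, dce6_get_pin };
static const struct audio_funcs dce4_funcs = { dce4_enable, dce4_get_pin };

int main(void)
{
	int is_dce6 = 1;  /* would come from the ASIC family check */
	const struct audio_funcs *funcs = is_dce6 ? &dce6_funcs : &dce4_funcs;

	funcs->enable(funcs->get_pin());  /* callers never branch on the family */
	return 0;
}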
H A D radeon_semaphore.c 34 int radeon_semaphore_create(struct radeon_device *rdev, radeon_semaphore_create() argument
43 r = radeon_sa_bo_new(rdev, &rdev->ring_tmp_bo, radeon_semaphore_create()
58 bool radeon_semaphore_emit_signal(struct radeon_device *rdev, int ridx, radeon_semaphore_emit_signal() argument
61 struct radeon_ring *ring = &rdev->ring[ridx]; radeon_semaphore_emit_signal()
65 if (radeon_semaphore_ring_emit(rdev, ridx, ring, semaphore, false)) { radeon_semaphore_emit_signal()
75 bool radeon_semaphore_emit_wait(struct radeon_device *rdev, int ridx, radeon_semaphore_emit_wait() argument
78 struct radeon_ring *ring = &rdev->ring[ridx]; radeon_semaphore_emit_wait()
82 if (radeon_semaphore_ring_emit(rdev, ridx, ring, semaphore, true)) { radeon_semaphore_emit_wait()
92 void radeon_semaphore_free(struct radeon_device *rdev, radeon_semaphore_free() argument
100 dev_err(rdev->dev, "semaphore %p has more waiters than signalers," radeon_semaphore_free()
103 radeon_sa_bo_free(rdev, &(*semaphore)->sa_bo, fence); radeon_semaphore_free()
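The dev_err in radeon_semaphore_free fires when a semaphore is freed with more emitted waits than signals, which would leave a ring blocked forever. A toy model of the bookkeeping behind that check (the driver bumps a waiters count on each emitted wait and drops it on each signal):

#include <stdio.h>

struct semaphore_model {
	int waiters;  /* waits emitted minus signals emitted */
};

static void emit_signal(struct semaphore_model *s) { s->waiters--; }
static void emit_wait(struct semaphore_model *s)   { s->waiters++; }

static void sem_free(struct semaphore_model *s)
{
	if (s->waiters > 0)
		fprintf(stderr,
			"semaphore has more waiters than signalers, hardware lockup imminent\n");
}

int main(void)
{
	struct semaphore_model s = { 0 };

	emit_wait(&s);  /* one ring waits... */
	sem_free(&s);   /* ...but nothing ever signals: the warning fires */
	(void)emit_signal;
	return 0;
}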
H A D radeon_cs.c 77 struct drm_device *ddev = p->rdev->ddev; radeon_cs_parser_relocs()
125 (i == 0 || drm_pci_device_is_agp(p->rdev->ddev) || radeon_cs_parser_relocs()
126 p->rdev->family == CHIP_RS780 || radeon_cs_parser_relocs()
127 p->rdev->family == CHIP_RS880)) { radeon_cs_parser_relocs()
177 p->vm_bos = radeon_vm_get_bos(p->rdev, p->ib.vm, radeon_cs_parser_relocs()
182 r = radeon_bo_list_validate(p->rdev, &p->ticket, &p->validated, p->ring); radeon_cs_parser_relocs()
202 if (p->rdev->family >= CHIP_TAHITI) { radeon_cs_get_ring()
211 if (p->rdev->family >= CHIP_CAYMAN) { radeon_cs_get_ring()
216 } else if (p->rdev->family >= CHIP_RV770) { radeon_cs_get_ring()
242 r = radeon_sync_resv(p->rdev, &p->ib.sync, resv, radeon_cs_sync_rings()
328 if (!p->rdev || !(p->rdev->flags & RADEON_IS_AGP)) radeon_cs_parser_init()
350 if (p->rdev) { radeon_cs_parser_init()
352 !p->rdev->vm_manager.enabled) { radeon_cs_parser_init()
362 if (p->rdev->asic->ring[p->ring]->cs_parse == NULL) { radeon_cs_parser_init()
367 if (p->rdev->asic->ring[p->ring]->ib_parse == NULL) { radeon_cs_parser_init()
437 radeon_ib_free(parser->rdev, &parser->ib); radeon_cs_parser_fini()
438 radeon_ib_free(parser->rdev, &parser->const_ib); radeon_cs_parser_fini()
441 static int radeon_cs_ib_chunk(struct radeon_device *rdev, radeon_cs_ib_chunk() argument
452 r = radeon_cs_parse(rdev, parser->ring, parser); radeon_cs_ib_chunk()
466 radeon_uvd_note_usage(rdev); radeon_cs_ib_chunk()
469 radeon_vce_note_usage(rdev); radeon_cs_ib_chunk()
471 r = radeon_ib_schedule(rdev, &parser->ib, NULL, true); radeon_cs_ib_chunk()
481 struct radeon_device *rdev = p->rdev; radeon_bo_vm_update_pte() local
485 r = radeon_vm_update_page_directory(rdev, vm); radeon_bo_vm_update_pte()
489 r = radeon_vm_clear_freed(rdev, vm); radeon_bo_vm_update_pte()
498 r = radeon_vm_bo_update(rdev, vm->ib_bo_va, radeon_bo_vm_update_pte()
499 &rdev->ring_tmp_bo.bo->tbo.mem); radeon_bo_vm_update_pte()
509 dev_err(rdev->dev, "bo %p not in vm %p\n", bo, vm); radeon_bo_vm_update_pte()
513 r = radeon_vm_bo_update(rdev, bo_va, &bo->tbo.mem); radeon_bo_vm_update_pte()
520 return radeon_vm_clear_invalids(rdev, vm); radeon_bo_vm_update_pte()
523 static int radeon_cs_ib_vm_chunk(struct radeon_device *rdev, radeon_cs_ib_vm_chunk() argument
536 r = radeon_ring_ib_parse(rdev, parser->ring, &parser->const_ib); radeon_cs_ib_vm_chunk()
542 r = radeon_ring_ib_parse(rdev, parser->ring, &parser->ib); radeon_cs_ib_vm_chunk()
548 radeon_uvd_note_usage(rdev); radeon_cs_ib_vm_chunk()
563 if ((rdev->family >= CHIP_TAHITI) && radeon_cs_ib_vm_chunk()
565 r = radeon_ib_schedule(rdev, &parser->ib, &parser->const_ib, true); radeon_cs_ib_vm_chunk()
567 r = radeon_ib_schedule(rdev, &parser->ib, NULL, true); radeon_cs_ib_vm_chunk()
575 static int radeon_cs_handle_lockup(struct radeon_device *rdev, int r) radeon_cs_handle_lockup() argument
578 r = radeon_gpu_reset(rdev); radeon_cs_handle_lockup()
585 static int radeon_cs_ib_fill(struct radeon_device *rdev, struct radeon_cs_parser *parser) radeon_cs_ib_fill() argument
598 if ((rdev->family >= CHIP_TAHITI) && radeon_cs_ib_fill()
605 r = radeon_ib_get(rdev, parser->ring, &parser->const_ib, radeon_cs_ib_fill()
627 r = radeon_ib_get(rdev, parser->ring, &parser->ib, radeon_cs_ib_fill()
643 struct radeon_device *rdev = dev->dev_private; radeon_cs_ioctl() local
647 down_read(&rdev->exclusive_lock); radeon_cs_ioctl()
648 if (!rdev->accel_working) { radeon_cs_ioctl()
649 up_read(&rdev->exclusive_lock); radeon_cs_ioctl()
652 if (rdev->in_reset) { radeon_cs_ioctl()
653 up_read(&rdev->exclusive_lock); radeon_cs_ioctl()
654 r = radeon_gpu_reset(rdev); radeon_cs_ioctl()
662 parser.rdev = rdev; radeon_cs_ioctl()
663 parser.dev = rdev->dev; radeon_cs_ioctl()
664 parser.family = rdev->family; radeon_cs_ioctl()
669 up_read(&rdev->exclusive_lock); radeon_cs_ioctl()
670 r = radeon_cs_handle_lockup(rdev, r); radeon_cs_ioctl()
674 r = radeon_cs_ib_fill(rdev, &parser); radeon_cs_ioctl()
683 up_read(&rdev->exclusive_lock); radeon_cs_ioctl()
684 r = radeon_cs_handle_lockup(rdev, r); radeon_cs_ioctl()
690 r = radeon_cs_ib_chunk(rdev, &parser); radeon_cs_ioctl()
694 r = radeon_cs_ib_vm_chunk(rdev, &parser); radeon_cs_ioctl()
700 up_read(&rdev->exclusive_lock); radeon_cs_ioctl()
701 r = radeon_cs_handle_lockup(rdev, r); radeon_cs_ioctl()
718 struct radeon_device *rdev = p->rdev; radeon_cs_packet_parse() local
734 if (rdev->family < CHIP_R600) { radeon_cs_packet_parse()
H A D radeon_display.c 44 struct radeon_device *rdev = dev->dev_private; avivo_crtc_load_lut() local
78 struct radeon_device *rdev = dev->dev_private; dce4_crtc_load_lut() local
108 struct radeon_device *rdev = dev->dev_private; dce5_crtc_load_lut() local
161 if (ASIC_IS_DCE8(rdev)) { dce5_crtc_load_lut()
174 struct radeon_device *rdev = dev->dev_private; legacy_crtc_load_lut() local
197 struct radeon_device *rdev = dev->dev_private; radeon_crtc_load_lut() local
202 if (ASIC_IS_DCE5(rdev)) radeon_crtc_load_lut()
204 else if (ASIC_IS_DCE4(rdev)) radeon_crtc_load_lut()
206 else if (ASIC_IS_AVIVO(rdev)) radeon_crtc_load_lut()
286 void radeon_crtc_handle_vblank(struct radeon_device *rdev, int crtc_id) radeon_crtc_handle_vblank() argument
288 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id]; radeon_crtc_handle_vblank()
306 if ((radeon_use_pflipirq == 2) && ASIC_IS_DCE4(rdev)) radeon_crtc_handle_vblank()
309 spin_lock_irqsave(&rdev->ddev->event_lock, flags); radeon_crtc_handle_vblank()
315 spin_unlock_irqrestore(&rdev->ddev->event_lock, flags); radeon_crtc_handle_vblank()
319 update_pending = radeon_page_flip_pending(rdev, crtc_id); radeon_crtc_handle_vblank()
325 (DRM_SCANOUTPOS_VALID & radeon_get_crtc_scanoutpos(rdev->ddev, radeon_crtc_handle_vblank()
329 &rdev->mode_info.crtcs[crtc_id]->base.hwmode)) && radeon_crtc_handle_vblank()
330 ((vpos >= (99 * rdev->mode_info.crtcs[crtc_id]->base.hwmode.crtc_vdisplay)/100) || radeon_crtc_handle_vblank()
331 (vpos < 0 && !ASIC_IS_AVIVO(rdev)))) { radeon_crtc_handle_vblank()
340 spin_unlock_irqrestore(&rdev->ddev->event_lock, flags); radeon_crtc_handle_vblank()
342 radeon_crtc_handle_flip(rdev, crtc_id); radeon_crtc_handle_vblank()
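radeon_crtc_handle_vblank completes a still-pending flip from the vblank handler only when the scanout position says the CRTC is effectively in vblank: vpos at or past 99% of the active lines, or negative on pre-AVIVO parts. A condensed model of that decision:

#include <stdio.h>
#include <stdbool.h>

/* Returns true when a pending flip should be treated as completed in
 * this vblank anyway; mirrors the >= 99%-of-vdisplay test above. */
static bool flip_done_in_vblank(bool update_pending, int vpos,
				int vdisplay, bool is_avivo)
{
	if (!update_pending)
		return true;               /* hardware already latched it */
	if (vpos >= (99 * vdisplay) / 100)
		return true;               /* about to enter vblank */
	if (vpos < 0 && !is_avivo)
		return true;               /* legacy parts report vblank as negative */
	return false;
}

int main(void)
{
	printf("%d\n", flip_done_in_vblank(true, 1070, 1080, true));  /* 1 */
	printf("%d\n", flip_done_in_vblank(true, 500, 1080, true));   /* 0 */
	return 0;
}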
348 * @rdev: radeon device pointer
353 void radeon_crtc_handle_flip(struct radeon_device *rdev, int crtc_id) radeon_crtc_handle_flip() argument
355 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id]; radeon_crtc_handle_flip()
363 spin_lock_irqsave(&rdev->ddev->event_lock, flags); radeon_crtc_handle_flip()
370 spin_unlock_irqrestore(&rdev->ddev->event_lock, flags); radeon_crtc_handle_flip()
380 drm_send_vblank_event(rdev->ddev, crtc_id, work->event); radeon_crtc_handle_flip()
382 spin_unlock_irqrestore(&rdev->ddev->event_lock, flags); radeon_crtc_handle_flip()
384 drm_vblank_put(rdev->ddev, radeon_crtc->crtc_id); radeon_crtc_handle_flip()
385 radeon_irq_kms_pflip_irq_put(rdev, work->crtc_id); radeon_crtc_handle_flip()
400 struct radeon_device *rdev = work->rdev; radeon_flip_work_func() local
401 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[work->crtc_id]; radeon_flip_work_func()
410 down_read(&rdev->exclusive_lock); radeon_flip_work_func()
415 if (fence && fence->rdev == rdev) { radeon_flip_work_func()
418 up_read(&rdev->exclusive_lock); radeon_flip_work_func()
420 r = radeon_gpu_reset(rdev); radeon_flip_work_func()
422 down_read(&rdev->exclusive_lock); radeon_flip_work_func()
443 radeon_irq_kms_pflip_irq_get(rdev, radeon_crtc->crtc_id); radeon_flip_work_func()
463 stat = radeon_get_crtc_scanoutpos(rdev->ddev, work->crtc_id, radeon_flip_work_func()
493 radeon_page_flip(rdev, radeon_crtc->crtc_id, work->base); radeon_flip_work_func()
497 up_read(&rdev->exclusive_lock); radeon_flip_work_func()
506 struct radeon_device *rdev = dev->dev_private; radeon_crtc_page_flip() local
525 work->rdev = rdev; radeon_crtc_page_flip()
552 ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27, &base); radeon_crtc_page_flip()
563 if (!ASIC_IS_AVIVO(rdev)) { radeon_crtc_page_flip()
569 if (ASIC_IS_R300(rdev)) { radeon_crtc_page_flip()
650 struct radeon_device *rdev; radeon_crtc_set_config() local
672 rdev = dev->dev_private; radeon_crtc_set_config()
675 if (active && !rdev->have_disp_power_ref) { radeon_crtc_set_config()
676 rdev->have_disp_power_ref = true; radeon_crtc_set_config()
681 if (!active && rdev->have_disp_power_ref) { radeon_crtc_set_config()
683 rdev->have_disp_power_ref = false; radeon_crtc_set_config()
701 struct radeon_device *rdev = dev->dev_private; radeon_crtc_init() local
714 rdev->mode_info.crtcs[index] = radeon_crtc; radeon_crtc_init()
716 if (rdev->family >= CHIP_BONAIRE) { radeon_crtc_init()
738 if (rdev->is_atom_bios && (ASIC_IS_AVIVO(rdev) || radeon_r4xx_atom)) radeon_crtc_init()
872 struct radeon_device *rdev = dev->dev_private; radeon_setup_enc_conn() local
875 if (rdev->bios) { radeon_setup_enc_conn()
876 if (rdev->is_atom_bios) { radeon_setup_enc_conn()
886 if (!ASIC_IS_AVIVO(rdev)) radeon_setup_enc_conn()
1395 struct radeon_device *rdev = dev->dev_private; radeon_output_poll_changed() local
1396 radeon_fb_output_poll_changed(rdev); radeon_output_poll_changed()
1445 static int radeon_modeset_create_props(struct radeon_device *rdev) radeon_modeset_create_props() argument
1449 if (rdev->is_atom_bios) { radeon_modeset_create_props()
1450 rdev->mode_info.coherent_mode_property = radeon_modeset_create_props()
1451 drm_property_create_range(rdev->ddev, 0, "coherent", 0, 1); radeon_modeset_create_props()
1452 if (!rdev->mode_info.coherent_mode_property) radeon_modeset_create_props()
1456 if (!ASIC_IS_AVIVO(rdev)) { radeon_modeset_create_props()
1458 rdev->mode_info.tmds_pll_property = radeon_modeset_create_props()
1459 drm_property_create_enum(rdev->ddev, 0, radeon_modeset_create_props()
1464 rdev->mode_info.load_detect_property = radeon_modeset_create_props()
1465 drm_property_create_range(rdev->ddev, 0, "load detection", 0, 1); radeon_modeset_create_props()
1466 if (!rdev->mode_info.load_detect_property) radeon_modeset_create_props()
1469 drm_mode_create_scaling_mode_property(rdev->ddev); radeon_modeset_create_props()
1472 rdev->mode_info.tv_std_property = radeon_modeset_create_props()
1473 drm_property_create_enum(rdev->ddev, 0, radeon_modeset_create_props()
1478 rdev->mode_info.underscan_property = radeon_modeset_create_props()
1479 drm_property_create_enum(rdev->ddev, 0, radeon_modeset_create_props()
1483 rdev->mode_info.underscan_hborder_property = radeon_modeset_create_props()
1484 drm_property_create_range(rdev->ddev, 0, radeon_modeset_create_props()
1486 if (!rdev->mode_info.underscan_hborder_property) radeon_modeset_create_props()
1489 rdev->mode_info.underscan_vborder_property = radeon_modeset_create_props()
1490 drm_property_create_range(rdev->ddev, 0, radeon_modeset_create_props()
1492 if (!rdev->mode_info.underscan_vborder_property) radeon_modeset_create_props()
1496 rdev->mode_info.audio_property = radeon_modeset_create_props()
1497 drm_property_create_enum(rdev->ddev, 0, radeon_modeset_create_props()
1502 rdev->mode_info.dither_property = radeon_modeset_create_props()
1503 drm_property_create_enum(rdev->ddev, 0, radeon_modeset_create_props()
1508 rdev->mode_info.output_csc_property = radeon_modeset_create_props()
1509 drm_property_create_enum(rdev->ddev, 0, radeon_modeset_create_props()
1516 void radeon_update_display_priority(struct radeon_device *rdev) radeon_update_display_priority() argument
1526 if ((ASIC_IS_R300(rdev) || (rdev->family == CHIP_RV515)) && radeon_update_display_priority()
1527 !(rdev->flags & RADEON_IS_IGP)) radeon_update_display_priority()
1528 rdev->disp_priority = 2; radeon_update_display_priority()
1530 rdev->disp_priority = 0; radeon_update_display_priority()
1532 rdev->disp_priority = radeon_disp_priority; radeon_update_display_priority()
1539 static void radeon_afmt_init(struct radeon_device *rdev) radeon_afmt_init() argument
1544 rdev->mode_info.afmt[i] = NULL; radeon_afmt_init()
1546 if (ASIC_IS_NODCE(rdev)) { radeon_afmt_init()
1548 } else if (ASIC_IS_DCE4(rdev)) { radeon_afmt_init()
1564 if (ASIC_IS_DCE8(rdev)) radeon_afmt_init()
1566 else if (ASIC_IS_DCE6(rdev)) radeon_afmt_init()
1568 else if (ASIC_IS_DCE5(rdev)) radeon_afmt_init()
1570 else if (ASIC_IS_DCE41(rdev)) radeon_afmt_init()
1577 rdev->mode_info.afmt[i] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL); radeon_afmt_init()
1578 if (rdev->mode_info.afmt[i]) { radeon_afmt_init()
1579 rdev->mode_info.afmt[i]->offset = eg_offsets[i]; radeon_afmt_init()
1580 rdev->mode_info.afmt[i]->id = i; radeon_afmt_init()
1583 } else if (ASIC_IS_DCE3(rdev)) { radeon_afmt_init()
1585 rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL); radeon_afmt_init()
1586 if (rdev->mode_info.afmt[0]) { radeon_afmt_init()
1587 rdev->mode_info.afmt[0]->offset = DCE3_HDMI_OFFSET0; radeon_afmt_init()
1588 rdev->mode_info.afmt[0]->id = 0; radeon_afmt_init()
1590 rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL); radeon_afmt_init()
1591 if (rdev->mode_info.afmt[1]) { radeon_afmt_init()
1592 rdev->mode_info.afmt[1]->offset = DCE3_HDMI_OFFSET1; radeon_afmt_init()
1593 rdev->mode_info.afmt[1]->id = 1; radeon_afmt_init()
1595 } else if (ASIC_IS_DCE2(rdev)) { radeon_afmt_init()
1597 rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL); radeon_afmt_init()
1598 if (rdev->mode_info.afmt[0]) { radeon_afmt_init()
1599 rdev->mode_info.afmt[0]->offset = DCE2_HDMI_OFFSET0; radeon_afmt_init()
1600 rdev->mode_info.afmt[0]->id = 0; radeon_afmt_init()
1603 if (rdev->family >= CHIP_R600) { radeon_afmt_init()
1604 rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL); radeon_afmt_init()
1605 if (rdev->mode_info.afmt[1]) { radeon_afmt_init()
1606 rdev->mode_info.afmt[1]->offset = DCE2_HDMI_OFFSET1; radeon_afmt_init()
1607 rdev->mode_info.afmt[1]->id = 1; radeon_afmt_init()
1613 static void radeon_afmt_fini(struct radeon_device *rdev) radeon_afmt_fini() argument
1618 kfree(rdev->mode_info.afmt[i]); radeon_afmt_fini()
1619 rdev->mode_info.afmt[i] = NULL; radeon_afmt_fini()
1623 int radeon_modeset_init(struct radeon_device *rdev) radeon_modeset_init() argument
1628 drm_mode_config_init(rdev->ddev); radeon_modeset_init()
1629 rdev->mode_info.mode_config_initialized = true; radeon_modeset_init()
1631 rdev->ddev->mode_config.funcs = &radeon_mode_funcs; radeon_modeset_init()
1633 if (ASIC_IS_DCE5(rdev)) { radeon_modeset_init()
1634 rdev->ddev->mode_config.max_width = 16384; radeon_modeset_init()
1635 rdev->ddev->mode_config.max_height = 16384; radeon_modeset_init()
1636 } else if (ASIC_IS_AVIVO(rdev)) { radeon_modeset_init()
1637 rdev->ddev->mode_config.max_width = 8192; radeon_modeset_init()
1638 rdev->ddev->mode_config.max_height = 8192; radeon_modeset_init()
1640 rdev->ddev->mode_config.max_width = 4096; radeon_modeset_init()
1641 rdev->ddev->mode_config.max_height = 4096; radeon_modeset_init()
1644 rdev->ddev->mode_config.preferred_depth = 24; radeon_modeset_init()
1645 rdev->ddev->mode_config.prefer_shadow = 1; radeon_modeset_init()
1647 rdev->ddev->mode_config.fb_base = rdev->mc.aper_base; radeon_modeset_init()
1649 ret = radeon_modeset_create_props(rdev); radeon_modeset_init()
1655 radeon_i2c_init(rdev); radeon_modeset_init()
1658 if (!rdev->is_atom_bios) { radeon_modeset_init()
1660 radeon_combios_check_hardcoded_edid(rdev); radeon_modeset_init()
1664 for (i = 0; i < rdev->num_crtc; i++) { radeon_modeset_init()
1665 radeon_crtc_init(rdev->ddev, i); radeon_modeset_init()
1669 ret = radeon_setup_enc_conn(rdev->ddev); radeon_modeset_init()
1675 if (rdev->is_atom_bios) { radeon_modeset_init()
1676 radeon_atom_encoder_init(rdev); radeon_modeset_init()
1677 radeon_atom_disp_eng_pll_init(rdev); radeon_modeset_init()
1681 radeon_hpd_init(rdev); radeon_modeset_init()
1684 radeon_afmt_init(rdev); radeon_modeset_init()
1686 radeon_fbdev_init(rdev); radeon_modeset_init()
1687 drm_kms_helper_poll_init(rdev->ddev); radeon_modeset_init()
1690 ret = radeon_pm_late_init(rdev); radeon_modeset_init()
1695 void radeon_modeset_fini(struct radeon_device *rdev) radeon_modeset_fini() argument
1697 radeon_fbdev_fini(rdev); radeon_modeset_fini()
1698 kfree(rdev->mode_info.bios_hardcoded_edid); radeon_modeset_fini()
1700 if (rdev->mode_info.mode_config_initialized) { radeon_modeset_fini()
1701 radeon_afmt_fini(rdev); radeon_modeset_fini()
1702 drm_kms_helper_poll_fini(rdev->ddev); radeon_modeset_fini()
1703 radeon_hpd_fini(rdev); radeon_modeset_fini()
1704 drm_mode_config_cleanup(rdev->ddev); radeon_modeset_fini()
1705 rdev->mode_info.mode_config_initialized = false; radeon_modeset_fini()
1708 radeon_i2c_fini(rdev); radeon_modeset_fini()
1728 struct radeon_device *rdev = dev->dev_private; radeon_crtc_scaling_mode_fixup() local
1767 if (ASIC_IS_AVIVO(rdev) && radeon_crtc_scaling_mode_fixup()
1862 struct radeon_device *rdev = dev->dev_private; radeon_get_crtc_scanoutpos() local
1870 if (ASIC_IS_DCE4(rdev)) { radeon_get_crtc_scanoutpos()
1913 } else if (ASIC_IS_AVIVO(rdev)) { radeon_get_crtc_scanoutpos()
1992 vbl_start -= rdev->mode_info.crtcs[pipe]->lb_vblank_lead_lines; radeon_get_crtc_scanoutpos()
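
Taken together, the radeon_afmt_init()/radeon_modeset_init() fragments above give the display bring-up order, which radeon_modeset_fini() unwinds in roughly reverse order. A condensed sketch of that sequence, reconstructed only from the lines shown (error unwinding elided):

	/* Condensed bring-up order from radeon_modeset_init() above. */
	drm_mode_config_init(rdev->ddev);
	rdev->mode_info.mode_config_initialized = true;
	rdev->ddev->mode_config.funcs = &radeon_mode_funcs;
	/* max_width/max_height: 16384 on DCE5, 8192 on AVIVO, else 4096 */
	ret = radeon_modeset_create_props(rdev);   /* driver properties */
	radeon_i2c_init(rdev);                     /* DDC/i2c buses */
	for (i = 0; i < rdev->num_crtc; i++)
		radeon_crtc_init(rdev->ddev, i);   /* one CRTC at a time */
	ret = radeon_setup_enc_conn(rdev->ddev);   /* encoders + connectors */
	if (rdev->is_atom_bios) {
		radeon_atom_encoder_init(rdev);
		radeon_atom_disp_eng_pll_init(rdev);
	}
	radeon_hpd_init(rdev);                     /* hotplug sensing */
	radeon_afmt_init(rdev);                    /* HDMI audio blocks */
	radeon_fbdev_init(rdev);                   /* fbdev emulation */
	drm_kms_helper_poll_init(rdev->ddev);      /* output polling */
	ret = radeon_pm_late_init(rdev);           /* power management */
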
H A Dsumo_dpm.h191 void sumo_gfx_clockgating_initialize(struct radeon_device *rdev);
192 void sumo_program_vc(struct radeon_device *rdev, u32 vrc);
193 void sumo_clear_vc(struct radeon_device *rdev);
194 void sumo_program_sstp(struct radeon_device *rdev);
195 void sumo_take_smu_control(struct radeon_device *rdev, bool enable);
196 void sumo_construct_sclk_voltage_mapping_table(struct radeon_device *rdev,
199 void sumo_construct_vid_mapping_table(struct radeon_device *rdev,
202 u32 sumo_convert_vid2_to_vid7(struct radeon_device *rdev,
206 u32 sumo_get_sleep_divider_id_from_clock(struct radeon_device *rdev,
211 void sumo_initialize_m3_arb(struct radeon_device *rdev);
212 void sumo_smu_pg_init(struct radeon_device *rdev);
213 void sumo_set_tdp_limit(struct radeon_device *rdev, u32 index, u32 tdp_limit);
214 void sumo_smu_notify_alt_vddnb_change(struct radeon_device *rdev,
216 void sumo_boost_state_enable(struct radeon_device *rdev, bool enable);
217 void sumo_enable_boost_timer(struct radeon_device *rdev);
218 u32 sumo_get_running_fw_version(struct radeon_device *rdev);
H A Duvd_v2_2.c34 * @rdev: radeon_device pointer
39 void uvd_v2_2_fence_emit(struct radeon_device *rdev, uvd_v2_2_fence_emit() argument
42 struct radeon_ring *ring = &rdev->ring[fence->ring]; uvd_v2_2_fence_emit()
43 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; uvd_v2_2_fence_emit()
65 * @rdev: radeon_device pointer
72 bool uvd_v2_2_semaphore_emit(struct radeon_device *rdev, uvd_v2_2_semaphore_emit() argument
94 * @rdev: radeon_device pointer
98 int uvd_v2_2_resume(struct radeon_device *rdev) uvd_v2_2_resume() argument
105 if (rdev->family == CHIP_RV770) uvd_v2_2_resume()
106 return uvd_v1_0_resume(rdev); uvd_v2_2_resume()
108 r = radeon_uvd_resume(rdev); uvd_v2_2_resume()
113 addr = rdev->uvd.gpu_addr >> 3; uvd_v2_2_resume()
114 size = RADEON_GPU_PAGE_ALIGN(rdev->uvd_fw->size + 4) >> 3; uvd_v2_2_resume()
129 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; uvd_v2_2_resume()
133 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; uvd_v2_2_resume()
137 switch (rdev->family) { uvd_v2_2_resume()
H A Dvce_v2_0.c38 static void vce_v2_0_set_sw_cg(struct radeon_device *rdev, bool gated) vce_v2_0_set_sw_cg() argument
73 static void vce_v2_0_set_dyn_cg(struct radeon_device *rdev, bool gated) vce_v2_0_set_dyn_cg() argument
102 static void vce_v2_0_disable_cg(struct radeon_device *rdev) vce_v2_0_disable_cg() argument
107 void vce_v2_0_enable_mgcg(struct radeon_device *rdev, bool enable) vce_v2_0_enable_mgcg() argument
111 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_VCE_MGCG)) { vce_v2_0_enable_mgcg()
113 vce_v2_0_set_sw_cg(rdev, true); vce_v2_0_enable_mgcg()
115 vce_v2_0_set_dyn_cg(rdev, true); vce_v2_0_enable_mgcg()
117 vce_v2_0_disable_cg(rdev); vce_v2_0_enable_mgcg()
120 vce_v2_0_set_sw_cg(rdev, false); vce_v2_0_enable_mgcg()
122 vce_v2_0_set_dyn_cg(rdev, false); vce_v2_0_enable_mgcg()
126 static void vce_v2_0_init_cg(struct radeon_device *rdev) vce_v2_0_init_cg() argument
147 unsigned vce_v2_0_bo_size(struct radeon_device *rdev) vce_v2_0_bo_size() argument
149 WARN_ON(rdev->vce_fw->size > VCE_V2_0_FW_SIZE); vce_v2_0_bo_size()
153 int vce_v2_0_resume(struct radeon_device *rdev) vce_v2_0_resume() argument
155 uint64_t addr = rdev->vce.gpu_addr; vce_v2_0_resume()
191 vce_v2_0_init_cg(rdev); vce_v2_0_resume()
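
vce_v2_0_enable_mgcg() above reduces to a two-way gate: medium-grain clock gating is only programmed when it is both requested and advertised in rdev->cg_flags; otherwise gating is torn down. A sketch of the branch structure (the selector between the software and dynamic paths is not visible in the fragment and is assumed here to be a local flag):

	/* Sketch of vce_v2_0_enable_mgcg(); "sw_cg" is assumed -- the search
	 * output does not show how the sw vs. dynamic path is chosen. */
	if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_VCE_MGCG)) {
		if (sw_cg)
			vce_v2_0_set_sw_cg(rdev, true);
		else
			vce_v2_0_set_dyn_cg(rdev, true);
	} else {
		vce_v2_0_disable_cg(rdev);
		if (sw_cg)
			vce_v2_0_set_sw_cg(rdev, false);
		else
			vce_v2_0_set_dyn_cg(rdev, false);
	}
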
H A Datombios_crtc.c39 struct radeon_device *rdev = dev->dev_private; atombios_overscan_setup() local
76 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_overscan_setup()
82 struct radeon_device *rdev = dev->dev_private; atombios_scaler_setup() local
92 if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id) atombios_scaler_setup()
149 if (ASIC_IS_AVIVO(rdev)) atombios_scaler_setup()
156 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_scaler_setup()
158 && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_R580) { atombios_scaler_setup()
159 atom_rv515_force_tv_scaler(rdev, radeon_crtc); atombios_scaler_setup()
167 struct radeon_device *rdev = dev->dev_private; atombios_lock_crtc() local
177 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_lock_crtc()
184 struct radeon_device *rdev = dev->dev_private; atombios_enable_crtc() local
193 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_enable_crtc()
200 struct radeon_device *rdev = dev->dev_private; atombios_enable_crtc_memreq() local
209 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_enable_crtc_memreq()
226 struct radeon_device *rdev = dev->dev_private; atombios_blank_crtc() local
233 if (ASIC_IS_DCE8(rdev)) { atombios_blank_crtc()
241 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_blank_crtc()
243 if (ASIC_IS_DCE8(rdev)) { atombios_blank_crtc()
252 struct radeon_device *rdev = dev->dev_private; atombios_powergate_crtc() local
261 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_powergate_crtc()
267 struct radeon_device *rdev = dev->dev_private; atombios_crtc_dpms() local
274 if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev)) atombios_crtc_dpms()
286 if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev)) atombios_crtc_dpms()
293 radeon_pm_compute_clocks(rdev); atombios_crtc_dpms()
302 struct radeon_device *rdev = dev->dev_private; atombios_set_crtc_dtd_timing() local
341 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_set_crtc_dtd_timing()
349 struct radeon_device *rdev = dev->dev_private; atombios_crtc_set_timing() local
387 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_crtc_set_timing()
390 static void atombios_disable_ss(struct radeon_device *rdev, int pll_id) atombios_disable_ss() argument
394 if (ASIC_IS_DCE4(rdev)) { atombios_disable_ss()
410 } else if (ASIC_IS_AVIVO(rdev)) { atombios_disable_ss()
438 static void atombios_crtc_program_ss(struct radeon_device *rdev, atombios_crtc_program_ss() argument
459 for (i = 0; i < rdev->num_crtc; i++) { atombios_crtc_program_ss()
460 if (rdev->mode_info.crtcs[i] && atombios_crtc_program_ss()
461 rdev->mode_info.crtcs[i]->enabled && atombios_crtc_program_ss()
463 pll_id == rdev->mode_info.crtcs[i]->pll_id) { atombios_crtc_program_ss()
475 if (ASIC_IS_DCE5(rdev)) { atombios_crtc_program_ss()
494 } else if (ASIC_IS_DCE4(rdev)) { atombios_crtc_program_ss()
513 } else if (ASIC_IS_DCE3(rdev)) { atombios_crtc_program_ss()
521 } else if (ASIC_IS_AVIVO(rdev)) { atombios_crtc_program_ss()
524 atombios_disable_ss(rdev, pll_id); atombios_crtc_program_ss()
535 atombios_disable_ss(rdev, pll_id); atombios_crtc_program_ss()
544 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_crtc_program_ss()
557 struct radeon_device *rdev = dev->dev_private; atombios_adjust_pll() local
571 if (ASIC_IS_AVIVO(rdev)) { atombios_adjust_pll()
572 if ((rdev->family == CHIP_RS600) || atombios_adjust_pll()
573 (rdev->family == CHIP_RS690) || atombios_adjust_pll()
574 (rdev->family == CHIP_RS740)) atombios_adjust_pll()
578 if (ASIC_IS_DCE32(rdev) && mode->clock > 200000) /* range limits??? */ atombios_adjust_pll()
583 if (rdev->family < CHIP_RV770) atombios_adjust_pll()
586 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE61(rdev) || ASIC_IS_DCE8(rdev)) atombios_adjust_pll()
589 if ((rdev->family == CHIP_RS780) || (rdev->family == CHIP_RS880)) atombios_adjust_pll()
591 if (ASIC_IS_DCE32(rdev) && mode->clock > 165000) atombios_adjust_pll()
626 if (ASIC_IS_AVIVO(rdev)) atombios_adjust_pll()
632 if (ASIC_IS_AVIVO(rdev)) { atombios_adjust_pll()
669 if (ASIC_IS_DCE3(rdev)) { atombios_adjust_pll()
675 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, atombios_adjust_pll()
693 atom_execute_table(rdev->mode_info.atom_context, atombios_adjust_pll()
726 atom_execute_table(rdev->mode_info.atom_context, atombios_adjust_pll()
765 static void atombios_crtc_set_disp_eng_pll(struct radeon_device *rdev, atombios_crtc_set_disp_eng_pll() argument
775 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, atombios_crtc_set_disp_eng_pll()
795 if (ASIC_IS_DCE61(rdev) || ASIC_IS_DCE8(rdev)) atombios_crtc_set_disp_eng_pll()
797 else if (ASIC_IS_DCE6(rdev)) atombios_crtc_set_disp_eng_pll()
811 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_crtc_set_disp_eng_pll()
829 struct radeon_device *rdev = dev->dev_private; atombios_crtc_program_pll() local
836 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, atombios_crtc_program_pll()
951 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_crtc_program_pll()
958 struct radeon_device *rdev = dev->dev_private; atombios_crtc_prepare_pll() local
988 if (ASIC_IS_DCE4(rdev)) atombios_crtc_prepare_pll()
990 radeon_atombios_get_asic_ss_info(rdev, &radeon_crtc->ss, atombios_crtc_prepare_pll()
996 radeon_atombios_get_ppll_ss_info(rdev, atombios_crtc_prepare_pll()
1001 radeon_atombios_get_ppll_ss_info(rdev, atombios_crtc_prepare_pll()
1006 radeon_atombios_get_ppll_ss_info(rdev, atombios_crtc_prepare_pll()
1015 if (ASIC_IS_DCE4(rdev)) atombios_crtc_prepare_pll()
1017 radeon_atombios_get_asic_ss_info(rdev, atombios_crtc_prepare_pll()
1023 radeon_atombios_get_ppll_ss_info(rdev, atombios_crtc_prepare_pll()
1028 if (ASIC_IS_DCE4(rdev)) atombios_crtc_prepare_pll()
1030 radeon_atombios_get_asic_ss_info(rdev, atombios_crtc_prepare_pll()
1036 if (ASIC_IS_DCE4(rdev)) atombios_crtc_prepare_pll()
1038 radeon_atombios_get_asic_ss_info(rdev, atombios_crtc_prepare_pll()
1058 struct radeon_device *rdev = dev->dev_private; atombios_crtc_set_pll() local
1068 if (ASIC_IS_DCE5(rdev) && atombios_crtc_set_pll()
1075 pll = &rdev->clock.p1pll; atombios_crtc_set_pll()
1078 pll = &rdev->clock.p2pll; atombios_crtc_set_pll()
1083 pll = &rdev->clock.dcpll; atombios_crtc_set_pll()
1096 else if (ASIC_IS_AVIVO(rdev)) atombios_crtc_set_pll()
1103 atombios_crtc_program_ss(rdev, ATOM_DISABLE, radeon_crtc->pll_id, atombios_crtc_set_pll()
1113 if (ASIC_IS_DCE4(rdev)) { atombios_crtc_set_pll()
1130 atombios_crtc_program_ss(rdev, ATOM_ENABLE, radeon_crtc->pll_id, atombios_crtc_set_pll()
1141 struct radeon_device *rdev = dev->dev_private; dce4_crtc_do_set_base() local
1265 if (rdev->family >= CHIP_TAHITI) { dce4_crtc_do_set_base()
1268 if (rdev->family >= CHIP_BONAIRE) { dce4_crtc_do_set_base()
1285 num_banks = (rdev->config.cik.macrotile_mode_array[index] >> 6) & 0x3; dce4_crtc_do_set_base()
1300 num_banks = (rdev->config.si.tile_mode_array[index] >> 20) & 0x3; dce4_crtc_do_set_base()
1306 if (rdev->family >= CHIP_CAYMAN) dce4_crtc_do_set_base()
1307 tmp = rdev->config.cayman.tile_config; dce4_crtc_do_set_base()
1309 tmp = rdev->config.evergreen.tile_config; dce4_crtc_do_set_base()
1330 if (rdev->family >= CHIP_BONAIRE) { dce4_crtc_do_set_base()
1337 if (rdev->family >= CHIP_BONAIRE) { dce4_crtc_do_set_base()
1341 u32 pipe_config = (rdev->config.cik.tile_mode_array[10] >> 6) & 0x1f; dce4_crtc_do_set_base()
1344 } else if ((rdev->family == CHIP_TAHITI) || dce4_crtc_do_set_base()
1345 (rdev->family == CHIP_PITCAIRN)) dce4_crtc_do_set_base()
1347 else if ((rdev->family == CHIP_VERDE) || dce4_crtc_do_set_base()
1348 (rdev->family == CHIP_OLAND) || dce4_crtc_do_set_base()
1349 (rdev->family == CHIP_HAINAN)) /* for completeness. HAINAN has no display hw */ dce4_crtc_do_set_base()
1409 if (rdev->family >= CHIP_BONAIRE) dce4_crtc_do_set_base()
1421 if ((rdev->family >= CHIP_BONAIRE) && dce4_crtc_do_set_base()
1447 radeon_bandwidth_update(rdev); dce4_crtc_do_set_base()
1458 struct radeon_device *rdev = dev->dev_private; avivo_crtc_do_set_base() local
1563 if (rdev->family >= CHIP_R600) { avivo_crtc_do_set_base()
1581 if (rdev->family >= CHIP_RV770) { avivo_crtc_do_set_base()
1595 if (rdev->family >= CHIP_R600) avivo_crtc_do_set_base()
1647 radeon_bandwidth_update(rdev); avivo_crtc_do_set_base()
1656 struct radeon_device *rdev = dev->dev_private; atombios_crtc_set_base() local
1658 if (ASIC_IS_DCE4(rdev)) atombios_crtc_set_base()
1660 else if (ASIC_IS_AVIVO(rdev)) atombios_crtc_set_base()
1671 struct radeon_device *rdev = dev->dev_private; atombios_crtc_set_base_atomic() local
1673 if (ASIC_IS_DCE4(rdev)) atombios_crtc_set_base_atomic()
1675 else if (ASIC_IS_AVIVO(rdev)) atombios_crtc_set_base_atomic()
1685 struct radeon_device *rdev = dev->dev_private; radeon_legacy_atom_fixup() local
1742 struct radeon_device *rdev = dev->dev_private; radeon_get_shared_dp_ppll() local
1753 if (ASIC_IS_DCE61(rdev) && !ASIC_IS_DCE8(rdev) && radeon_get_shared_dp_ppll()
1777 struct radeon_device *rdev = dev->dev_private; radeon_get_shared_nondp_ppll() local
1794 if (ASIC_IS_DCE61(rdev) && !ASIC_IS_DCE8(rdev) && radeon_get_shared_nondp_ppll()
1856 struct radeon_device *rdev = dev->dev_private; radeon_atom_pick_pll() local
1862 if (ASIC_IS_DCE8(rdev)) { radeon_atom_pick_pll()
1864 if (rdev->clock.dp_extclk) radeon_atom_pick_pll()
1880 if ((rdev->family == CHIP_KABINI) || radeon_atom_pick_pll()
1881 (rdev->family == CHIP_MULLINS)) { radeon_atom_pick_pll()
1902 } else if (ASIC_IS_DCE61(rdev)) { radeon_atom_pick_pll()
1912 if (rdev->clock.dp_extclk) radeon_atom_pick_pll()
1935 } else if (ASIC_IS_DCE41(rdev)) { radeon_atom_pick_pll()
1938 if (rdev->clock.dp_extclk) radeon_atom_pick_pll()
1949 } else if (ASIC_IS_DCE4(rdev)) { radeon_atom_pick_pll()
1961 if (rdev->clock.dp_extclk) radeon_atom_pick_pll()
1964 else if (ASIC_IS_DCE6(rdev)) radeon_atom_pick_pll()
1967 else if (ASIC_IS_DCE5(rdev)) radeon_atom_pick_pll()
2010 void radeon_atom_disp_eng_pll_init(struct radeon_device *rdev) radeon_atom_disp_eng_pll_init() argument
2013 if (ASIC_IS_DCE6(rdev)) radeon_atom_disp_eng_pll_init()
2014 atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk); radeon_atom_disp_eng_pll_init()
2015 else if (ASIC_IS_DCE4(rdev)) { radeon_atom_disp_eng_pll_init()
2017 bool ss_enabled = radeon_atombios_get_asic_ss_info(rdev, &ss, radeon_atom_disp_eng_pll_init()
2019 rdev->clock.default_dispclk); radeon_atom_disp_eng_pll_init()
2021 atombios_crtc_program_ss(rdev, ATOM_DISABLE, ATOM_DCPLL, -1, &ss); radeon_atom_disp_eng_pll_init()
2023 atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk); radeon_atom_disp_eng_pll_init()
2025 atombios_crtc_program_ss(rdev, ATOM_ENABLE, ATOM_DCPLL, -1, &ss); radeon_atom_disp_eng_pll_init()
2037 struct radeon_device *rdev = dev->dev_private; atombios_crtc_mode_set() local
2051 if (ASIC_IS_DCE4(rdev)) atombios_crtc_mode_set()
2053 else if (ASIC_IS_AVIVO(rdev)) { atombios_crtc_mode_set()
2118 struct radeon_device *rdev = dev->dev_private; atombios_crtc_prepare() local
2121 if (ASIC_IS_DCE6(rdev)) atombios_crtc_prepare()
2138 struct radeon_device *rdev = dev->dev_private; atombios_crtc_disable() local
2159 if (ASIC_IS_DCE4(rdev)) atombios_crtc_disable()
2161 else if (ASIC_IS_AVIVO(rdev)) atombios_crtc_disable()
2164 if (ASIC_IS_DCE6(rdev)) atombios_crtc_disable()
2167 for (i = 0; i < rdev->num_crtc; i++) { atombios_crtc_disable()
2168 if (rdev->mode_info.crtcs[i] && atombios_crtc_disable()
2169 rdev->mode_info.crtcs[i]->enabled && atombios_crtc_disable()
2171 radeon_crtc->pll_id == rdev->mode_info.crtcs[i]->pll_id) { atombios_crtc_disable()
2188 if ((rdev->family == CHIP_ARUBA) || atombios_crtc_disable()
2189 (rdev->family == CHIP_KAVERI) || atombios_crtc_disable()
2190 (rdev->family == CHIP_BONAIRE) || atombios_crtc_disable()
2191 (rdev->family == CHIP_HAWAII)) atombios_crtc_disable()
2220 struct radeon_device *rdev = dev->dev_private; radeon_atombios_init_crtc() local
2222 if (ASIC_IS_DCE4(rdev)) { radeon_atombios_init_crtc()
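
Nearly every atombios_crtc.c fragment above instantiates the same idiom: resolve a command-table index, parse the table header for its format/content revisions, fill a revision-appropriate parameter block, and hand it to the AtomBIOS interpreter. The skeleton, with EnableCRTC and args standing in for whichever table and parameter union a caller uses:

	int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC);
	u8 frev, crev;

	if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index,
				   &frev, &crev))
		return;			/* this BIOS lacks the table */
	memset(&args, 0, sizeof(args));
	/* ... populate args according to (frev, crev) ... */
	atom_execute_table(rdev->mode_info.atom_context, index,
			   (uint32_t *)&args);
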
H A Dbtc_dpm.h38 void btc_read_arb_registers(struct radeon_device *rdev);
39 void btc_program_mgcg_hw_sequence(struct radeon_device *rdev,
41 void btc_skip_blacklist_clocks(struct radeon_device *rdev,
44 void btc_adjust_clock_combinations(struct radeon_device *rdev,
51 void btc_apply_voltage_delta_rules(struct radeon_device *rdev,
54 bool btc_dpm_enabled(struct radeon_device *rdev);
55 int btc_reset_to_default(struct radeon_device *rdev);
56 void btc_notify_uvd_to_smc(struct radeon_device *rdev,
H A Dradeon_kfd.h39 void radeon_kfd_suspend(struct radeon_device *rdev);
40 int radeon_kfd_resume(struct radeon_device *rdev);
41 void radeon_kfd_interrupt(struct radeon_device *rdev,
43 void radeon_kfd_device_probe(struct radeon_device *rdev);
44 void radeon_kfd_device_init(struct radeon_device *rdev);
45 void radeon_kfd_device_fini(struct radeon_device *rdev);
H A Dradeon_encoders.c43 struct radeon_device *rdev = dev->dev_private; radeon_encoder_clones() local
50 if (rdev->family >= CHIP_R600) radeon_encoder_clones()
88 struct radeon_device *rdev = dev->dev_private; radeon_get_encoder_enum() local
99 if ((rdev->family == CHIP_RS300) || radeon_get_encoder_enum()
100 (rdev->family == CHIP_RS400) || radeon_get_encoder_enum()
101 (rdev->family == CHIP_RS480)) radeon_get_encoder_enum()
103 else if (ASIC_IS_AVIVO(rdev)) radeon_get_encoder_enum()
109 if (ASIC_IS_AVIVO(rdev)) radeon_get_encoder_enum()
112 /*if (rdev->family == CHIP_R200) radeon_get_encoder_enum()
119 if (ASIC_IS_AVIVO(rdev)) radeon_get_encoder_enum()
127 if (ASIC_IS_AVIVO(rdev)) radeon_get_encoder_enum()
133 if ((rdev->family == CHIP_RS300) || radeon_get_encoder_enum()
134 (rdev->family == CHIP_RS400) || radeon_get_encoder_enum()
135 (rdev->family == CHIP_RS480)) radeon_get_encoder_enum()
137 else if (ASIC_IS_AVIVO(rdev)) radeon_get_encoder_enum()
144 if ((rdev->family == CHIP_RS600) || radeon_get_encoder_enum()
145 (rdev->family == CHIP_RS690) || radeon_get_encoder_enum()
146 (rdev->family == CHIP_RS740)) radeon_get_encoder_enum()
148 else if (ASIC_IS_AVIVO(rdev)) radeon_get_encoder_enum()
165 struct radeon_device *rdev = dev->dev_private; radeon_encoder_add_backlight() local
178 if ((rdev->pdev->device == 0x9583) && radeon_encoder_add_backlight()
179 (rdev->pdev->subsystem_vendor == 0x1734) && radeon_encoder_add_backlight()
180 (rdev->pdev->subsystem_device == 0x1107)) radeon_encoder_add_backlight()
185 else if (rdev->family < CHIP_R600) radeon_encoder_add_backlight()
193 if (rdev->is_atom_bios) radeon_encoder_add_backlight()
324 struct radeon_device *rdev = dev->dev_private; radeon_panel_mode_fixup() local
336 if (ASIC_IS_AVIVO(rdev)) { radeon_panel_mode_fixup()
351 if (ASIC_IS_AVIVO(rdev)) { radeon_panel_mode_fixup()
370 struct radeon_device *rdev = dev->dev_private; radeon_dig_monitor_is_duallink() local
388 if (ASIC_IS_DCE6(rdev) && drm_detect_hdmi_monitor(radeon_connector_edid(connector))) { radeon_dig_monitor_is_duallink()
413 if (ASIC_IS_DCE6(rdev) && drm_detect_hdmi_monitor(radeon_connector_edid(connector))) { radeon_dig_monitor_is_duallink()
H A Dradeon_mn.c41 struct radeon_device *rdev; member in struct:radeon_mn
48 /* protected by rdev->mn_lock */
71 struct radeon_device *rdev = rmn->rdev; radeon_mn_destroy() local
75 mutex_lock(&rdev->mn_lock); radeon_mn_destroy()
89 mutex_unlock(&rdev->mn_lock); radeon_mn_destroy()
179 * @rdev: radeon device pointer
183 static struct radeon_mn *radeon_mn_get(struct radeon_device *rdev) radeon_mn_get() argument
190 mutex_lock(&rdev->mn_lock); radeon_mn_get()
192 hash_for_each_possible(rdev->mn_hash, rmn, node, (unsigned long)mm) radeon_mn_get()
202 rmn->rdev = rdev; radeon_mn_get()
212 hash_add(rdev->mn_hash, &rmn->node, (unsigned long)mm); radeon_mn_get()
215 mutex_unlock(&rdev->mn_lock); radeon_mn_get()
221 mutex_unlock(&rdev->mn_lock); radeon_mn_get()
240 struct radeon_device *rdev = bo->rdev; radeon_mn_register() local
246 rmn = radeon_mn_get(rdev); radeon_mn_register()
295 struct radeon_device *rdev = bo->rdev; radeon_mn_unregister() local
299 mutex_lock(&rdev->mn_lock); radeon_mn_unregister()
302 mutex_unlock(&rdev->mn_lock); radeon_mn_unregister()
321 mutex_unlock(&rdev->mn_lock); radeon_mn_unregister()
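
radeon_mn_get() above is a lookup-or-create keyed on the process address space: everything runs under rdev->mn_lock, and a tracker is only allocated and published on a hash miss. Condensed from the fragments (allocation and mmu_notifier registration are elided; the rmn->mm field is assumed from context):

	mutex_lock(&rdev->mn_lock);
	hash_for_each_possible(rdev->mn_hash, rmn, node, (unsigned long)mm)
		if (rmn->mm == mm)		/* mm member assumed */
			goto unlock;		/* already tracked */
	/* miss: allocate rmn, register its mmu notifier, then publish it */
	rmn->rdev = rdev;
	hash_add(rdev->mn_hash, &rmn->node, (unsigned long)mm);
unlock:
	mutex_unlock(&rdev->mn_lock);
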
H A Dtrinity_dpm.h122 int trinity_dpm_bapm_enable(struct radeon_device *rdev, bool enable);
123 int trinity_dpm_config(struct radeon_device *rdev, bool enable);
124 int trinity_uvd_dpm_config(struct radeon_device *rdev);
125 int trinity_dpm_force_state(struct radeon_device *rdev, u32 n);
126 int trinity_dpm_n_levels_disabled(struct radeon_device *rdev, u32 n);
127 int trinity_dpm_no_forced_level(struct radeon_device *rdev);
128 int trinity_dce_enable_voltage_adjustment(struct radeon_device *rdev,
130 int trinity_gfx_dynamic_mgpg_config(struct radeon_device *rdev);
131 void trinity_acquire_mutex(struct radeon_device *rdev);
132 void trinity_release_mutex(struct radeon_device *rdev);
H A Dradeon_kfd.c174 void radeon_kfd_device_probe(struct radeon_device *rdev) radeon_kfd_device_probe() argument
177 rdev->kfd = kgd2kfd->probe((struct kgd_dev *)rdev, radeon_kfd_device_probe()
178 rdev->pdev, &kfd2kgd); radeon_kfd_device_probe()
181 void radeon_kfd_device_init(struct radeon_device *rdev) radeon_kfd_device_init() argument
183 if (rdev->kfd) { radeon_kfd_device_init()
191 radeon_doorbell_get_kfd_info(rdev, radeon_kfd_device_init()
196 kgd2kfd->device_init(rdev->kfd, &gpu_resources); radeon_kfd_device_init()
200 void radeon_kfd_device_fini(struct radeon_device *rdev) radeon_kfd_device_fini() argument
202 if (rdev->kfd) { radeon_kfd_device_fini()
203 kgd2kfd->device_exit(rdev->kfd); radeon_kfd_device_fini()
204 rdev->kfd = NULL; radeon_kfd_device_fini()
208 void radeon_kfd_interrupt(struct radeon_device *rdev, const void *ih_ring_entry) radeon_kfd_interrupt() argument
210 if (rdev->kfd) radeon_kfd_interrupt()
211 kgd2kfd->interrupt(rdev->kfd, ih_ring_entry); radeon_kfd_interrupt()
214 void radeon_kfd_suspend(struct radeon_device *rdev) radeon_kfd_suspend() argument
216 if (rdev->kfd) radeon_kfd_suspend()
217 kgd2kfd->suspend(rdev->kfd); radeon_kfd_suspend()
220 int radeon_kfd_resume(struct radeon_device *rdev) radeon_kfd_resume() argument
224 if (rdev->kfd) radeon_kfd_resume()
225 r = kgd2kfd->resume(rdev->kfd); radeon_kfd_resume()
234 struct radeon_device *rdev = (struct radeon_device *)kgd; alloc_gtt_mem() local
246 r = radeon_bo_create(rdev, size, PAGE_SIZE, true, RADEON_GEM_DOMAIN_GTT, alloc_gtt_mem()
249 dev_err(rdev->dev, alloc_gtt_mem()
257 dev_err(rdev->dev, "(%d) failed to reserve bo for amdkfd\n", r); alloc_gtt_mem()
264 dev_err(rdev->dev, "(%d) failed to pin bo for amdkfd\n", r); alloc_gtt_mem()
271 dev_err(rdev->dev, alloc_gtt_mem()
307 struct radeon_device *rdev = (struct radeon_device *)kgd; get_vmem_size() local
311 return rdev->mc.real_vram_size; get_vmem_size()
316 struct radeon_device *rdev = (struct radeon_device *)kgd; get_gpu_clock_counter() local
318 return rdev->asic->get_gpu_clock_counter(rdev); get_gpu_clock_counter()
323 struct radeon_device *rdev = (struct radeon_device *)kgd; get_max_engine_clock_in_mhz() local
326 return rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk / 100; get_max_engine_clock_in_mhz()
336 struct radeon_device *rdev = get_radeon_device(kgd); write_register() local
338 writel(value, (void __iomem *)(rdev->rmmio + offset)); write_register()
343 struct radeon_device *rdev = get_radeon_device(kgd); read_register() local
345 return readl((void __iomem *)(rdev->rmmio + offset)); read_register()
351 struct radeon_device *rdev = get_radeon_device(kgd); lock_srbm() local
354 mutex_lock(&rdev->srbm_mutex); lock_srbm()
360 struct radeon_device *rdev = get_radeon_device(kgd); unlock_srbm() local
363 mutex_unlock(&rdev->srbm_mutex); unlock_srbm()
762 struct radeon_device *rdev = get_radeon_device(kgd); kgd_wave_control_execute() local
765 mutex_lock(&rdev->grbm_idx_mutex); kgd_wave_control_execute()
777 mutex_unlock(&rdev->grbm_idx_mutex); kgd_wave_control_execute()
792 struct radeon_device *rdev = (struct radeon_device *) kgd; get_atc_vmid_pasid_mapping_valid() local
802 struct radeon_device *rdev = (struct radeon_device *) kgd; get_atc_vmid_pasid_mapping_pasid() local
810 struct radeon_device *rdev = (struct radeon_device *) kgd; write_vmid_invalidate_request() local
817 struct radeon_device *rdev = (struct radeon_device *) kgd; get_fw_version() local
820 BUG_ON(kgd == NULL || rdev->mec_fw == NULL); get_fw_version()
824 hdr = (const union radeon_firmware_header *) rdev->pfp_fw->data; get_fw_version()
828 hdr = (const union radeon_firmware_header *) rdev->me_fw->data; get_fw_version()
832 hdr = (const union radeon_firmware_header *) rdev->ce_fw->data; get_fw_version()
836 hdr = (const union radeon_firmware_header *) rdev->mec_fw->data; get_fw_version()
841 rdev->mec2_fw->data; get_fw_version()
845 hdr = (const union radeon_firmware_header *) rdev->rlc_fw->data; get_fw_version()
851 rdev->sdma_fw->data; get_fw_version()
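
Every radeon_kfd_* entry point above shares one guard: rdev->kfd stays NULL unless kgd2kfd->probe() succeeded, so each dispatch into amdkfd is conditional. The suspend hook, copied from the fragment, is representative:

	void radeon_kfd_suspend(struct radeon_device *rdev)
	{
		if (rdev->kfd)			/* amdkfd may be absent */
			kgd2kfd->suspend(rdev->kfd);
	}
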
H A Drv770_smc.c277 static int rv770_set_smc_sram_address(struct radeon_device *rdev, rv770_set_smc_sram_address() argument
295 int rv770_copy_bytes_to_smc(struct radeon_device *rdev, rv770_copy_bytes_to_smc() argument
311 spin_lock_irqsave(&rdev->smc_idx_lock, flags); rv770_copy_bytes_to_smc()
316 ret = rv770_set_smc_sram_address(rdev, addr, limit); rv770_copy_bytes_to_smc()
331 ret = rv770_set_smc_sram_address(rdev, addr, limit); rv770_copy_bytes_to_smc()
349 ret = rv770_set_smc_sram_address(rdev, addr, limit); rv770_copy_bytes_to_smc()
357 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); rv770_copy_bytes_to_smc()
362 static int rv770_program_interrupt_vectors(struct radeon_device *rdev, rv770_program_interrupt_vectors() argument
392 void rv770_start_smc(struct radeon_device *rdev) rv770_start_smc() argument
397 void rv770_reset_smc(struct radeon_device *rdev) rv770_reset_smc() argument
402 void rv770_stop_smc_clock(struct radeon_device *rdev) rv770_stop_smc_clock() argument
407 void rv770_start_smc_clock(struct radeon_device *rdev) rv770_start_smc_clock() argument
412 bool rv770_is_smc_running(struct radeon_device *rdev) rv770_is_smc_running() argument
424 PPSMC_Result rv770_send_msg_to_smc(struct radeon_device *rdev, PPSMC_Msg msg) rv770_send_msg_to_smc() argument
430 if (!rv770_is_smc_running(rdev)) rv770_send_msg_to_smc()
435 for (i = 0; i < rdev->usec_timeout; i++) { rv770_send_msg_to_smc()
450 PPSMC_Result rv770_wait_for_smc_inactive(struct radeon_device *rdev) rv770_wait_for_smc_inactive() argument
455 if (!rv770_is_smc_running(rdev)) rv770_wait_for_smc_inactive()
458 for (i = 0; i < rdev->usec_timeout; i++) { rv770_wait_for_smc_inactive()
467 static void rv770_clear_smc_sram(struct radeon_device *rdev, u16 limit) rv770_clear_smc_sram() argument
472 spin_lock_irqsave(&rdev->smc_idx_lock, flags); rv770_clear_smc_sram()
474 rv770_set_smc_sram_address(rdev, i, limit); rv770_clear_smc_sram()
477 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); rv770_clear_smc_sram()
480 int rv770_load_smc_ucode(struct radeon_device *rdev, rv770_load_smc_ucode() argument
491 if (!rdev->smc_fw) rv770_load_smc_ucode()
494 rv770_clear_smc_sram(rdev, limit); rv770_load_smc_ucode()
496 switch (rdev->family) { rv770_load_smc_ucode()
588 ucode_data = (const u8 *)rdev->smc_fw->data; rv770_load_smc_ucode()
589 ret = rv770_copy_bytes_to_smc(rdev, ucode_start_address, rv770_load_smc_ucode()
595 ret = rv770_program_interrupt_vectors(rdev, int_vect_start_address, rv770_load_smc_ucode()
603 int rv770_read_smc_sram_dword(struct radeon_device *rdev, rv770_read_smc_sram_dword() argument
609 spin_lock_irqsave(&rdev->smc_idx_lock, flags); rv770_read_smc_sram_dword()
610 ret = rv770_set_smc_sram_address(rdev, smc_address, limit); rv770_read_smc_sram_dword()
613 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); rv770_read_smc_sram_dword()
618 int rv770_write_smc_sram_dword(struct radeon_device *rdev, rv770_write_smc_sram_dword() argument
624 spin_lock_irqsave(&rdev->smc_idx_lock, flags); rv770_write_smc_sram_dword()
625 ret = rv770_set_smc_sram_address(rdev, smc_address, limit); rv770_write_smc_sram_dword()
628 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); rv770_write_smc_sram_dword()
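
The dword accessors above spell out the locking contract for SMC SRAM: the address/data pair is an indexed interface shared across callers, so setting the address and moving the data must happen atomically under rdev->smc_idx_lock. A sketch of the read side (the full parameter list and the data-register read are truncated by the search output and assumed here):

	int rv770_read_smc_sram_dword(struct radeon_device *rdev,
				      u16 smc_address, u32 *value, u16 limit)
	{
		unsigned long flags;
		int ret;

		spin_lock_irqsave(&rdev->smc_idx_lock, flags);
		ret = rv770_set_smc_sram_address(rdev, smc_address, limit);
		if (ret == 0)
			*value = RREG32(SMC_SRAM_DATA);	/* register name assumed */
		spin_unlock_irqrestore(&rdev->smc_idx_lock, flags);

		return ret;
	}
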
H A Drv730_dpm.c38 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
40 int rv730_populate_sclk_value(struct radeon_device *rdev, rv730_populate_sclk_value() argument
44 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv730_populate_sclk_value()
52 u32 reference_clock = rdev->clock.spll.reference_freq; rv730_populate_sclk_value()
57 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, rv730_populate_sclk_value()
95 if (radeon_atombios_get_asic_ss_info(rdev, &ss, rv730_populate_sclk_value()
119 int rv730_populate_mclk_value(struct radeon_device *rdev, rv730_populate_mclk_value() argument
123 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv730_populate_mclk_value()
135 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM, rv730_populate_mclk_value()
170 if (radeon_atombios_get_asic_ss_info(rdev, &ss, rv730_populate_mclk_value()
172 u32 reference_clock = rdev->clock.mpll.reference_freq; rv730_populate_mclk_value()
198 void rv730_read_clock_registers(struct radeon_device *rdev) rv730_read_clock_registers() argument
200 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv730_read_clock_registers()
229 int rv730_populate_smc_acpi_state(struct radeon_device *rdev, rv730_populate_smc_acpi_state() argument
232 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv730_populate_smc_acpi_state()
246 rv770_populate_vddc_value(rdev, pi->acpi_vddc, rv730_populate_smc_acpi_state()
253 rv770_populate_vddc_value(rdev, pi->min_vddc_in_table, rv730_populate_smc_acpi_state()
311 rv770_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd); rv730_populate_smc_acpi_state()
319 int rv730_populate_smc_initial_state(struct radeon_device *rdev, rv730_populate_smc_initial_state() argument
324 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv730_populate_smc_initial_state()
362 rv770_get_seq_value(rdev, &initial_state->low); rv730_populate_smc_initial_state()
364 rv770_populate_vddc_value(rdev, rv730_populate_smc_initial_state()
367 rv770_populate_initial_mvdd_value(rdev, rv730_populate_smc_initial_state()
393 void rv730_program_memory_timing_parameters(struct radeon_device *rdev, rv730_program_memory_timing_parameters() argument
406 (POWERMODE1(rv770_calculate_memory_refresh_rate(rdev, state->low.sclk)) | rv730_program_memory_timing_parameters()
407 POWERMODE2(rv770_calculate_memory_refresh_rate(rdev, state->medium.sclk)) | rv730_program_memory_timing_parameters()
408 POWERMODE3(rv770_calculate_memory_refresh_rate(rdev, state->high.sclk))); rv730_program_memory_timing_parameters()
415 radeon_atom_set_engine_dram_timings(rdev, rv730_program_memory_timing_parameters()
425 radeon_atom_set_engine_dram_timings(rdev, rv730_program_memory_timing_parameters()
435 radeon_atom_set_engine_dram_timings(rdev, rv730_program_memory_timing_parameters()
451 void rv730_start_dpm(struct radeon_device *rdev) rv730_start_dpm() argument
460 void rv730_stop_dpm(struct radeon_device *rdev) rv730_stop_dpm() argument
464 result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_TwoLevelsDisabled); rv730_stop_dpm()
476 void rv730_program_dcodt(struct radeon_device *rdev, bool use_dcodt) rv730_program_dcodt() argument
478 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv730_program_dcodt()
495 void rv730_get_odt_values(struct radeon_device *rdev) rv730_get_odt_values() argument
497 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv730_get_odt_values()
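
Both rv730_populate_sclk_value() above and rv740_populate_sclk_value() later in the listing lean on the same two AtomBIOS helpers: one derives PLL dividers for a target engine clock, the other optionally fetches spread-spectrum parameters scaled against the SPLL reference. A sketch of that shape (the trailing arguments and the ASIC_INTERNAL_ENGINE_SS id are truncated in the fragments and assumed here):

	struct atom_clock_dividers dividers;
	struct radeon_atom_ss ss;
	u32 reference_clock = rdev->clock.spll.reference_freq;
	int ret;

	/* divider setup for the requested engine clock */
	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					     engine_clock, false, &dividers);
	if (ret)
		return ret;

	/* optional spread spectrum for the same clock */
	if (radeon_atombios_get_asic_ss_info(rdev, &ss,
					     ASIC_INTERNAL_ENGINE_SS,
					     engine_clock)) {
		/* ... fold ss.percentage/ss.rate into the programmed values ... */
	}
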
H A Dradeon_connectors.c51 struct radeon_device *rdev = dev->dev_private; radeon_connector_hotplug() local
71 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd); radeon_connector_hotplug()
95 if (!radeon_hpd_sense(rdev, radeon_connector->hpd.hpd)) { radeon_connector_hotplug()
128 struct radeon_device *rdev = dev->dev_private; radeon_get_monitor_bpc() local
164 else if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) { radeon_get_monitor_bpc()
181 if ((bpc > 8) && !ASIC_IS_DCE4(rdev)) { radeon_get_monitor_bpc()
252 struct radeon_device *rdev = dev->dev_private; radeon_connector_update_scratch_regs() local
275 if (rdev->is_atom_bios) radeon_connector_update_scratch_regs()
320 struct radeon_device *rdev = dev->dev_private; radeon_connector_get_edid() local
356 if ((rdev->flags & RADEON_IS_PX) && (radeon_runtime_pm != 0)) radeon_connector_get_edid()
359 if (rdev->is_atom_bios) { radeon_connector_get_edid()
363 radeon_connector->edid = radeon_bios_get_hardcoded_edid(rdev); radeon_connector_get_edid()
366 radeon_connector->edid = radeon_bios_get_hardcoded_edid(rdev); radeon_connector_get_edid()
568 struct radeon_device *rdev = dev->dev_private; radeon_connector_set_property() local
572 if (property == rdev->mode_info.coherent_mode_property) { radeon_connector_set_property()
594 if (property == rdev->mode_info.audio_property) { radeon_connector_set_property()
609 if (property == rdev->mode_info.dither_property) { radeon_connector_set_property()
624 if (property == rdev->mode_info.underscan_property) { radeon_connector_set_property()
638 if (property == rdev->mode_info.underscan_hborder_property) { radeon_connector_set_property()
652 if (property == rdev->mode_info.underscan_vborder_property) { radeon_connector_set_property()
666 if (property == rdev->mode_info.tv_std_property) { radeon_connector_set_property()
678 if (ASIC_IS_AVIVO(rdev) || radeon_r4xx_atom) { radeon_connector_set_property()
690 if (property == rdev->mode_info.load_detect_property) { radeon_connector_set_property()
700 if (property == rdev->mode_info.tmds_pll_property) { radeon_connector_set_property()
715 if (rdev->is_atom_bios) radeon_connector_set_property()
755 if (property == rdev->mode_info.output_csc_property) { radeon_connector_set_property()
888 struct radeon_device *rdev = dev->dev_private; radeon_lvds_detect() local
908 if ((rdev->flags & RADEON_IS_PX) && (radeon_runtime_pm != 0)) radeon_lvds_detect()
1001 struct radeon_device *rdev = dev->dev_private; radeon_vga_mode_valid() local
1005 if ((mode->clock / 10) > rdev->clock.max_pixel_clock) radeon_vga_mode_valid()
1015 struct radeon_device *rdev = dev->dev_private; radeon_vga_detect() local
1083 if ((!rdev->is_atom_bios) && radeon_vga_detect()
1085 rdev->mode_info.bios_hardcoded_edid_size) { radeon_vga_detect()
1115 struct radeon_device *rdev = dev->dev_private; radeon_tv_get_modes() local
1124 if (rdev->family >= CHIP_RS600) radeon_tv_get_modes()
1192 struct radeon_device *rdev = dev->dev_private; radeon_check_hpd_status_unchanged() local
1197 if (rdev->family >= CHIP_R600 radeon_check_hpd_status_unchanged()
1199 if (radeon_hpd_sense(rdev, radeon_connector->hpd.hpd)) radeon_check_hpd_status_unchanged()
1225 struct radeon_device *rdev = dev->dev_private; radeon_dvi_detect() local
1258 schedule_delayed_work(&rdev->hotplug_work, radeon_dvi_detect()
1273 if ((rdev->family == CHIP_RS690 || rdev->family == CHIP_RS740) && radeon_dvi_detect()
1314 if (!radeon_hpd_sense(rdev, radeon_connector->hpd.hpd)) { radeon_dvi_detect()
1396 if ((!rdev->is_atom_bios) && radeon_dvi_detect()
1398 rdev->mode_info.bios_hardcoded_edid_size) { radeon_dvi_detect()
1471 struct radeon_device *rdev = dev->dev_private; radeon_dvi_mode_valid() local
1478 (rdev->family == CHIP_RV100) && radeon_dvi_mode_valid()
1487 else if (ASIC_IS_DCE6(rdev) && drm_detect_hdmi_monitor(radeon_connector_edid(connector))) { radeon_dvi_mode_valid()
1499 if ((mode->clock / 10) > rdev->clock.max_pixel_clock) radeon_dvi_mode_valid()
1644 struct radeon_device *rdev = dev->dev_private; radeon_connector_is_dp12_capable() local
1646 if (ASIC_IS_DCE5(rdev) && radeon_connector_is_dp12_capable()
1647 (rdev->clock.default_dispclk >= 53900) && radeon_connector_is_dp12_capable()
1659 struct radeon_device *rdev = dev->dev_private; radeon_dp_detect() local
1692 if ((rdev->flags & RADEON_IS_PX) && (radeon_runtime_pm != 0)) radeon_dp_detect()
1725 if (radeon_hpd_sense(rdev, radeon_connector->hpd.hpd)) { radeon_dp_detect()
1768 struct radeon_device *rdev = dev->dev_private; radeon_dp_mode_valid() local
1804 if (ASIC_IS_DCE6(rdev) && drm_detect_hdmi_monitor(radeon_connector_edid(connector))) { radeon_dp_mode_valid()
1862 struct radeon_device *rdev = dev->dev_private; radeon_add_atom_connector() local
1932 radeon_connector->router_bus = radeon_i2c_lookup(rdev, &router->i2c_info); radeon_add_atom_connector()
1944 radeon_connector->ddc_bus = radeon_i2c_lookup(rdev, i2c_bus); radeon_add_atom_connector()
1962 rdev->mode_info.load_detect_property, radeon_add_atom_connector()
1967 if (ASIC_IS_DCE5(rdev)) radeon_add_atom_connector()
1969 rdev->mode_info.output_csc_property, radeon_add_atom_connector()
1982 rdev->mode_info.underscan_property, radeon_add_atom_connector()
1985 rdev->mode_info.underscan_hborder_property, radeon_add_atom_connector()
1988 rdev->mode_info.underscan_vborder_property, radeon_add_atom_connector()
1996 rdev->mode_info.dither_property, radeon_add_atom_connector()
2001 rdev->mode_info.audio_property, radeon_add_atom_connector()
2005 if (ASIC_IS_DCE5(rdev)) radeon_add_atom_connector()
2007 rdev->mode_info.output_csc_property, radeon_add_atom_connector()
2019 rdev->mode_info.load_detect_property, radeon_add_atom_connector()
2043 radeon_connector->ddc_bus = radeon_i2c_lookup(rdev, i2c_bus); radeon_add_atom_connector()
2049 rdev->mode_info.load_detect_property, radeon_add_atom_connector()
2051 if (ASIC_IS_AVIVO(rdev)) radeon_add_atom_connector()
2055 if (ASIC_IS_DCE5(rdev)) radeon_add_atom_connector()
2057 rdev->mode_info.output_csc_property, radeon_add_atom_connector()
2069 radeon_connector->ddc_bus = radeon_i2c_lookup(rdev, i2c_bus); radeon_add_atom_connector()
2075 rdev->mode_info.load_detect_property, radeon_add_atom_connector()
2077 if (ASIC_IS_AVIVO(rdev)) radeon_add_atom_connector()
2081 if (ASIC_IS_DCE5(rdev)) radeon_add_atom_connector()
2083 rdev->mode_info.output_csc_property, radeon_add_atom_connector()
2100 radeon_connector->ddc_bus = radeon_i2c_lookup(rdev, i2c_bus); radeon_add_atom_connector()
2106 rdev->mode_info.coherent_mode_property, radeon_add_atom_connector()
2108 if (ASIC_IS_AVIVO(rdev)) { radeon_add_atom_connector()
2110 rdev->mode_info.underscan_property, radeon_add_atom_connector()
2113 rdev->mode_info.underscan_hborder_property, radeon_add_atom_connector()
2116 rdev->mode_info.underscan_vborder_property, radeon_add_atom_connector()
2119 rdev->mode_info.dither_property, radeon_add_atom_connector()
2125 if (ASIC_IS_DCE2(rdev) && (radeon_audio != 0)) { radeon_add_atom_connector()
2127 rdev->mode_info.audio_property, radeon_add_atom_connector()
2134 rdev->mode_info.load_detect_property, radeon_add_atom_connector()
2137 if (ASIC_IS_DCE5(rdev)) radeon_add_atom_connector()
2139 rdev->mode_info.output_csc_property, radeon_add_atom_connector()
2157 radeon_connector->ddc_bus = radeon_i2c_lookup(rdev, i2c_bus); radeon_add_atom_connector()
2162 rdev->mode_info.coherent_mode_property, radeon_add_atom_connector()
2164 if (ASIC_IS_AVIVO(rdev)) { radeon_add_atom_connector()
2166 rdev->mode_info.underscan_property, radeon_add_atom_connector()
2169 rdev->mode_info.underscan_hborder_property, radeon_add_atom_connector()
2172 rdev->mode_info.underscan_vborder_property, radeon_add_atom_connector()
2175 rdev->mode_info.dither_property, radeon_add_atom_connector()
2181 if (ASIC_IS_DCE2(rdev) && (radeon_audio != 0)) { radeon_add_atom_connector()
2183 rdev->mode_info.audio_property, radeon_add_atom_connector()
2187 if (ASIC_IS_DCE5(rdev)) radeon_add_atom_connector()
2189 rdev->mode_info.output_csc_property, radeon_add_atom_connector()
2207 radeon_connector->ddc_bus = radeon_i2c_lookup(rdev, i2c_bus); radeon_add_atom_connector()
2215 rdev->mode_info.coherent_mode_property, radeon_add_atom_connector()
2217 if (ASIC_IS_AVIVO(rdev)) { radeon_add_atom_connector()
2219 rdev->mode_info.underscan_property, radeon_add_atom_connector()
2222 rdev->mode_info.underscan_hborder_property, radeon_add_atom_connector()
2225 rdev->mode_info.underscan_vborder_property, radeon_add_atom_connector()
2228 rdev->mode_info.dither_property, radeon_add_atom_connector()
2234 if (ASIC_IS_DCE2(rdev) && (radeon_audio != 0)) { radeon_add_atom_connector()
2236 rdev->mode_info.audio_property, radeon_add_atom_connector()
2240 if (ASIC_IS_DCE5(rdev)) radeon_add_atom_connector()
2242 rdev->mode_info.output_csc_property, radeon_add_atom_connector()
2257 radeon_connector->ddc_bus = radeon_i2c_lookup(rdev, i2c_bus); radeon_add_atom_connector()
2277 rdev->mode_info.load_detect_property, radeon_add_atom_connector()
2280 rdev->mode_info.tv_std_property, radeon_add_atom_connector()
2281 radeon_atombios_get_tv_info(rdev)); radeon_add_atom_connector()
2296 radeon_connector->ddc_bus = radeon_i2c_lookup(rdev, i2c_bus); radeon_add_atom_connector()
2338 struct radeon_device *rdev = dev->dev_private; radeon_add_legacy_connector() local
2378 radeon_connector->ddc_bus = radeon_i2c_lookup(rdev, i2c_bus); radeon_add_legacy_connector()
2384 rdev->mode_info.load_detect_property, radeon_add_legacy_connector()
2396 radeon_connector->ddc_bus = radeon_i2c_lookup(rdev, i2c_bus); radeon_add_legacy_connector()
2402 rdev->mode_info.load_detect_property, radeon_add_legacy_connector()
2414 radeon_connector->ddc_bus = radeon_i2c_lookup(rdev, i2c_bus); radeon_add_legacy_connector()
2421 rdev->mode_info.load_detect_property, radeon_add_legacy_connector()
2442 if (rdev->family == CHIP_RS400 || rdev->family == CHIP_RS480) radeon_add_legacy_connector()
2445 rdev->mode_info.load_detect_property, radeon_add_legacy_connector()
2448 rdev->mode_info.tv_std_property, radeon_add_legacy_connector()
2449 radeon_combios_get_tv_info(rdev)); radeon_add_legacy_connector()
2459 radeon_connector->ddc_bus = radeon_i2c_lookup(rdev, i2c_bus); radeon_add_legacy_connector()
2483 struct radeon_device *rdev = dev->dev_private; radeon_setup_mst_connector() local
2487 if (!ASIC_IS_DCE5(rdev)) radeon_setup_mst_connector()
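
radeon_add_atom_connector() above attaches the same small set of driver properties again and again, gated on ASIC generation. A representative sketch for a DVI/HDMI-class connector (the attach call and the default values are not shown in the fragments and are assumed):

	drm_object_attach_property(&radeon_connector->base.base,
				   rdev->mode_info.coherent_mode_property, 1);
	if (ASIC_IS_AVIVO(rdev)) {
		drm_object_attach_property(&radeon_connector->base.base,
					   rdev->mode_info.underscan_property,
					   UNDERSCAN_OFF);
		drm_object_attach_property(&radeon_connector->base.base,
					   rdev->mode_info.underscan_hborder_property, 0);
		drm_object_attach_property(&radeon_connector->base.base,
					   rdev->mode_info.underscan_vborder_property, 0);
	}
	if (ASIC_IS_DCE5(rdev))
		drm_object_attach_property(&radeon_connector->base.base,
					   rdev->mode_info.output_csc_property,
					   RADEON_OUTPUT_CSC_BYPASS);
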
H A Duvd_v4_2.c34 * @rdev: radeon_device pointer
38 int uvd_v4_2_resume(struct radeon_device *rdev) uvd_v4_2_resume() argument
44 addr = rdev->uvd.gpu_addr >> 3; uvd_v4_2_resume()
45 size = RADEON_GPU_PAGE_ALIGN(rdev->uvd_fw->size + 4) >> 3; uvd_v4_2_resume()
60 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; uvd_v4_2_resume()
64 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; uvd_v4_2_resume()
H A Dr600_hdmi.c59 static struct r600_audio_pin r600_audio_status(struct radeon_device *rdev) r600_audio_status() argument
87 dev_err(rdev->dev, "Unknown bits per sample 0x%x, using 16\n", r600_audio_status()
116 struct radeon_device *rdev = container_of(work, struct radeon_device, r600_audio_update_hdmi() local
118 struct drm_device *dev = rdev->ddev; r600_audio_update_hdmi()
119 struct r600_audio_pin audio_status = r600_audio_status(rdev); r600_audio_update_hdmi()
123 if (rdev->audio.pin[0].channels != audio_status.channels || r600_audio_update_hdmi()
124 rdev->audio.pin[0].rate != audio_status.rate || r600_audio_update_hdmi()
125 rdev->audio.pin[0].bits_per_sample != audio_status.bits_per_sample || r600_audio_update_hdmi()
126 rdev->audio.pin[0].status_bits != audio_status.status_bits || r600_audio_update_hdmi()
127 rdev->audio.pin[0].category_code != audio_status.category_code) { r600_audio_update_hdmi()
128 rdev->audio.pin[0] = audio_status; r600_audio_update_hdmi()
141 void r600_audio_enable(struct radeon_device *rdev, r600_audio_enable() argument
171 struct r600_audio_pin *r600_audio_get_pin(struct radeon_device *rdev) r600_audio_get_pin() argument
174 return &rdev->audio.pin[0]; r600_audio_get_pin()
181 struct radeon_device *rdev = dev->dev_private; r600_hdmi_update_acr() local
184 uint32_t acr_ctl = ASIC_IS_DCE3(rdev) ? DCE3_HDMI0_ACR_PACKET_CONTROL : r600_hdmi_update_acr()
217 void r600_set_avi_packet(struct radeon_device *rdev, u32 offset, r600_set_avi_packet() argument
247 struct radeon_device *rdev = dev->dev_private; r600_hdmi_update_audio_infoframe() local
265 struct radeon_device *rdev = dev->dev_private; r600_hdmi_is_audio_buffer_filled() local
298 struct radeon_device *rdev = dev->dev_private; r600_hdmi_audio_workaround() local
314 void r600_hdmi_audio_set_dto(struct radeon_device *rdev, r600_hdmi_audio_set_dto() argument
343 struct radeon_device *rdev = dev->dev_private; r600_set_vbi_packet() local
354 struct radeon_device *rdev = dev->dev_private; r600_set_audio_packet() local
396 struct radeon_device *rdev = dev->dev_private; r600_set_mute() local
414 struct radeon_device *rdev = dev->dev_private; r600_hdmi_update_audio_settings() local
417 struct r600_audio_pin audio = r600_audio_status(rdev); r600_hdmi_update_audio_settings()
472 struct radeon_device *rdev = dev->dev_private; r600_hdmi_enable() local
481 if (!ASIC_IS_DCE3(rdev)) { r600_hdmi_enable()
514 dev_err(rdev->dev, "Invalid encoder for HDMI: 0x%X\n", r600_hdmi_enable()
521 if (rdev->irq.installed) { r600_hdmi_enable()
525 radeon_irq_kms_enable_afmt(rdev, dig->afmt->id); r600_hdmi_enable()
527 radeon_irq_kms_disable_afmt(rdev, dig->afmt->id); r600_hdmi_enable()
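
r600_audio_update_hdmi() above is the polling model in miniature: sample the current pin state, compare it field-by-field against the cached copy, and only on a change store the new state and refresh the outputs. Condensed from the fragment (the refresh step is elided by the search output):

	struct r600_audio_pin audio_status = r600_audio_status(rdev);

	if (rdev->audio.pin[0].channels        != audio_status.channels ||
	    rdev->audio.pin[0].rate            != audio_status.rate ||
	    rdev->audio.pin[0].bits_per_sample != audio_status.bits_per_sample ||
	    rdev->audio.pin[0].status_bits     != audio_status.status_bits ||
	    rdev->audio.pin[0].category_code   != audio_status.category_code) {
		rdev->audio.pin[0] = audio_status;	/* cache the new state */
		/* ... then walk the encoders and update their infoframes ... */
	}
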
H A Dradeon_cursor.c32 struct radeon_device *rdev = crtc->dev->dev_private; radeon_lock_cursor() local
36 if (ASIC_IS_DCE4(rdev)) { radeon_lock_cursor()
43 } else if (ASIC_IS_AVIVO(rdev)) { radeon_lock_cursor()
63 struct radeon_device *rdev = crtc->dev->dev_private; radeon_hide_cursor() local
65 if (ASIC_IS_DCE4(rdev)) { radeon_hide_cursor()
69 } else if (ASIC_IS_AVIVO(rdev)) { radeon_hide_cursor()
91 struct radeon_device *rdev = crtc->dev->dev_private; radeon_show_cursor() local
93 if (ASIC_IS_DCE4(rdev)) { radeon_show_cursor()
102 } else if (ASIC_IS_AVIVO(rdev)) { radeon_show_cursor()
103 if (rdev->family >= CHIP_RV770) { radeon_show_cursor()
142 struct radeon_device *rdev = crtc->dev->dev_private; radeon_cursor_move_locked() local
146 if (ASIC_IS_AVIVO(rdev)) { radeon_cursor_move_locked()
163 if (ASIC_IS_AVIVO(rdev) && !ASIC_IS_DCE6(rdev)) { radeon_cursor_move_locked()
204 if (ASIC_IS_DCE4(rdev)) { radeon_cursor_move_locked()
209 } else if (ASIC_IS_AVIVO(rdev)) { radeon_cursor_move_locked()
259 struct radeon_device *rdev = crtc->dev->dev_private; radeon_crtc_cursor_set2() local
291 ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27, radeon_crtc_cursor_set2()
H A Drv770_smc.h190 int rv770_copy_bytes_to_smc(struct radeon_device *rdev,
193 void rv770_start_smc(struct radeon_device *rdev);
194 void rv770_reset_smc(struct radeon_device *rdev);
195 void rv770_stop_smc_clock(struct radeon_device *rdev);
196 void rv770_start_smc_clock(struct radeon_device *rdev);
197 bool rv770_is_smc_running(struct radeon_device *rdev);
198 PPSMC_Result rv770_send_msg_to_smc(struct radeon_device *rdev, PPSMC_Msg msg);
199 PPSMC_Result rv770_wait_for_smc_inactive(struct radeon_device *rdev);
200 int rv770_read_smc_sram_dword(struct radeon_device *rdev,
202 int rv770_write_smc_sram_dword(struct radeon_device *rdev,
204 int rv770_load_smc_ucode(struct radeon_device *rdev,
H A Drv740_dpm.c32 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
121 int rv740_populate_sclk_value(struct radeon_device *rdev, u32 engine_clock, rv740_populate_sclk_value() argument
124 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv740_populate_sclk_value()
132 u32 reference_clock = rdev->clock.spll.reference_freq; rv740_populate_sclk_value()
137 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, rv740_populate_sclk_value()
163 if (radeon_atombios_get_asic_ss_info(rdev, &ss, rv740_populate_sclk_value()
187 int rv740_populate_mclk_value(struct radeon_device *rdev, rv740_populate_mclk_value() argument
191 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv740_populate_mclk_value()
205 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM, rv740_populate_mclk_value()
210 ibias = rv770_map_clkf_to_ibias(rdev, dividers.whole_fb_div); rv740_populate_mclk_value()
250 if (radeon_atombios_get_asic_ss_info(rdev, &ss, rv740_populate_mclk_value()
252 u32 reference_clock = rdev->clock.mpll.reference_freq; rv740_populate_mclk_value()
285 void rv740_read_clock_registers(struct radeon_device *rdev) rv740_read_clock_registers() argument
287 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv740_read_clock_registers()
315 int rv740_populate_smc_acpi_state(struct radeon_device *rdev, rv740_populate_smc_acpi_state() argument
318 struct rv7xx_power_info *pi = rv770_get_pi(rdev); rv740_populate_smc_acpi_state()
334 rv770_populate_vddc_value(rdev, pi->acpi_vddc, rv740_populate_smc_acpi_state()
342 rv770_populate_vddc_value(rdev, pi->min_vddc_in_table, rv740_populate_smc_acpi_state()
392 rv770_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd); rv740_populate_smc_acpi_state()
397 void rv740_enable_mclk_spread_spectrum(struct radeon_device *rdev, rv740_enable_mclk_spread_spectrum() argument
H A Dradeon_acpi.c37 extern void radeon_pm_acpi_event_handler(struct radeon_device *rdev);
340 * @rdev: radeon_device pointer
347 int radeon_atif_handler(struct radeon_device *rdev, radeon_atif_handler() argument
350 struct radeon_atif *atif = &rdev->atif; radeon_atif_handler()
367 handle = ACPI_HANDLE(&rdev->pdev->dev); radeon_atif_handler()
382 radeon_set_backlight_level(rdev, enc, req.backlight_level); radeon_atif_handler()
385 if (rdev->is_atom_bios) { radeon_atif_handler()
522 * @rdev: radeon_device pointer
528 bool radeon_acpi_is_pcie_performance_request_supported(struct radeon_device *rdev) radeon_acpi_is_pcie_performance_request_supported() argument
530 struct radeon_atcs *atcs = &rdev->atcs; radeon_acpi_is_pcie_performance_request_supported()
541 * @rdev: radeon_device pointer
547 int radeon_acpi_pcie_notify_device_ready(struct radeon_device *rdev) radeon_acpi_pcie_notify_device_ready() argument
551 struct radeon_atcs *atcs = &rdev->atcs; radeon_acpi_pcie_notify_device_ready()
554 handle = ACPI_HANDLE(&rdev->pdev->dev); radeon_acpi_pcie_notify_device_ready()
573 * @rdev: radeon_device pointer
581 int radeon_acpi_pcie_performance_request(struct radeon_device *rdev, radeon_acpi_pcie_performance_request() argument
586 struct radeon_atcs *atcs = &rdev->atcs; radeon_acpi_pcie_performance_request()
594 handle = ACPI_HANDLE(&rdev->pdev->dev); radeon_acpi_pcie_performance_request()
603 atcs_input.client_id = rdev->pdev->devfn | (rdev->pdev->bus->number << 8); radeon_acpi_pcie_performance_request()
663 struct radeon_device *rdev = container_of(nb, struct radeon_device, acpi_nb); radeon_acpi_event() local
672 radeon_pm_acpi_event_handler(rdev); radeon_acpi_event()
676 return radeon_atif_handler(rdev, entry); radeon_acpi_event()
683 * @rdev: radeon_device pointer
689 int radeon_acpi_init(struct radeon_device *rdev) radeon_acpi_init() argument
692 struct radeon_atif *atif = &rdev->atif; radeon_acpi_init()
693 struct radeon_atcs *atcs = &rdev->atcs; radeon_acpi_init()
697 handle = ACPI_HANDLE(&rdev->pdev->dev); radeon_acpi_init()
700 if (!ASIC_IS_AVIVO(rdev) || !rdev->bios || !handle) radeon_acpi_init()
721 list_for_each_entry(tmp, &rdev->ddev->mode_config.encoder_list, radeon_acpi_init()
727 if (rdev->is_atom_bios) { radeon_acpi_init()
773 rdev->acpi_nb.notifier_call = radeon_acpi_event; radeon_acpi_init()
774 register_acpi_notifier(&rdev->acpi_nb); radeon_acpi_init()
782 * @rdev: radeon_device pointer
786 void radeon_acpi_fini(struct radeon_device *rdev) radeon_acpi_fini() argument
788 unregister_acpi_notifier(&rdev->acpi_nb); radeon_acpi_fini()
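
The tail of radeon_acpi_init() above shows the glue between the kernel's ACPI event stream and the driver: one notifier_block per device, whose callback fans events out to the power-management handler and the ATIF handler. The wiring, copied from the fragments:

	/* radeon_acpi_init(): register once per device ... */
	rdev->acpi_nb.notifier_call = radeon_acpi_event;
	register_acpi_notifier(&rdev->acpi_nb);
	/* ... radeon_acpi_event() forwards to radeon_pm_acpi_event_handler()
	 * and radeon_atif_handler() ... */
	unregister_acpi_notifier(&rdev->acpi_nb);	/* radeon_acpi_fini() */
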
H A Datombios_encoders.c37 radeon_atom_get_backlight_level_from_reg(struct radeon_device *rdev) radeon_atom_get_backlight_level_from_reg() argument
42 if (rdev->family >= CHIP_R600) radeon_atom_get_backlight_level_from_reg()
54 radeon_atom_set_backlight_level_to_reg(struct radeon_device *rdev, radeon_atom_set_backlight_level_to_reg() argument
59 if (rdev->family >= CHIP_R600) radeon_atom_set_backlight_level_to_reg()
68 if (rdev->family >= CHIP_R600) radeon_atom_set_backlight_level_to_reg()
78 struct radeon_device *rdev = dev->dev_private; atombios_get_backlight_level() local
80 if (!(rdev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU)) atombios_get_backlight_level()
83 return radeon_atom_get_backlight_level_from_reg(rdev); atombios_get_backlight_level()
91 struct radeon_device *rdev = dev->dev_private; atombios_set_backlight_level() local
96 if (!(rdev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU)) atombios_set_backlight_level()
103 radeon_atom_set_backlight_level_to_reg(rdev, dig->backlight_level); atombios_set_backlight_level()
111 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_set_backlight_level()
114 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_set_backlight_level()
116 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_set_backlight_level()
168 struct radeon_device *rdev = dev->dev_private; radeon_atom_backlight_get_brightness() local
170 return radeon_atom_get_backlight_level_from_reg(rdev); radeon_atom_backlight_get_brightness()
182 struct radeon_device *rdev = dev->dev_private; radeon_atom_backlight_init() local
192 if ((rdev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) && radeon_atom_backlight_init()
193 (rdev->pdev->device == 0x6741)) radeon_atom_backlight_init()
199 if (!rdev->is_atom_bios) radeon_atom_backlight_init()
202 if (!(rdev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU)) radeon_atom_backlight_init()
240 rdev->mode_info.bl_encoder = radeon_encoder; radeon_atom_backlight_init()
252 struct radeon_device *rdev = dev->dev_private; radeon_atom_backlight_exit() local
259 if (!rdev->is_atom_bios) radeon_atom_backlight_exit()
262 if (!(rdev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU)) radeon_atom_backlight_exit()
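
The backlight fragments above share a single gate: nothing touches the backlight path unless the AtomBIOS firmware flags mark it GPU-controlled. The guard, as it recurs in atombios_get_backlight_level(), atombios_set_backlight_level(), radeon_atom_backlight_init() and radeon_atom_backlight_exit():

	if (!(rdev->mode_info.firmware_flags &
	      ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;		/* value-returning variants bail out early too */
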
293 bool radeon_atom_get_tv_timings(struct radeon_device *rdev, int index,
302 struct radeon_device *rdev = dev->dev_private; radeon_atom_mode_fixup() local
326 radeon_atom_get_tv_timings(rdev, 0, adjusted_mode); radeon_atom_mode_fixup()
328 radeon_atom_get_tv_timings(rdev, 1, adjusted_mode); radeon_atom_mode_fixup()
334 if (ASIC_IS_DCE3(rdev) && radeon_atom_mode_fixup()
348 struct radeon_device *rdev = dev->dev_private; atombios_dac_setup() local
392 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_dac_setup()
400 struct radeon_device *rdev = dev->dev_private; atombios_tv_setup() local
448 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_tv_setup()
489 struct radeon_device *rdev = dev->dev_private; atombios_dvo_setup() local
497 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) atombios_dvo_setup()
501 if (rdev->family <= CHIP_RV410) atombios_dvo_setup()
549 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_dvo_setup()
561 struct radeon_device *rdev = dev->dev_private; atombios_digital_setup() local
593 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) atombios_digital_setup()
667 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_digital_setup()
674 struct radeon_device *rdev = dev->dev_private; atombios_get_encoder_mode() local
749 ASIC_IS_DCE4(rdev) && !ASIC_IS_DCE5(rdev)) atombios_get_encoder_mode()
767 ASIC_IS_DCE4(rdev) && !ASIC_IS_DCE5(rdev)) atombios_get_encoder_mode()
842 struct radeon_device *rdev = dev->dev_private; atombios_dig_encoder_setup2() local
869 if (ASIC_IS_DCE4(rdev)) atombios_dig_encoder_setup2()
878 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) atombios_dig_encoder_setup2()
990 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_dig_encoder_setup2()
1012 struct radeon_device *rdev = dev->dev_private; atombios_dig_transmitter_setup2() local
1079 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) atombios_dig_transmitter_setup2()
1108 if ((rdev->flags & RADEON_IS_IGP) && atombios_dig_transmitter_setup2()
1217 if (is_dp && rdev->clock.dp_extclk) atombios_dig_transmitter_setup2()
1277 if (rdev->clock.dp_extclk) atombios_dig_transmitter_setup2()
1344 if (is_dp && rdev->clock.dp_extclk) atombios_dig_transmitter_setup2()
1372 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_dig_transmitter_setup2()
1386 struct radeon_device *rdev = dev->dev_private; atombios_set_edp_panel_power() local
1394 if (!ASIC_IS_DCE4(rdev)) atombios_set_edp_panel_power()
1401 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) atombios_set_edp_panel_power()
1408 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_set_edp_panel_power()
1415 if (radeon_hpd_sense(rdev, radeon_connector->hpd.hpd)) atombios_set_edp_panel_power()
1436 struct radeon_device *rdev = dev->dev_private; atombios_external_encoder_setup() local
1466 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) atombios_external_encoder_setup()
1530 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_external_encoder_setup()
1537 struct radeon_device *rdev = dev->dev_private; atombios_yuv_setup() local
1546 if (rdev->family >= CHIP_R600) atombios_yuv_setup()
1565 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_yuv_setup()
1574 struct radeon_device *rdev = dev->dev_private; radeon_atom_encoder_dpms_avivo() local
1629 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_encoder_dpms_avivo()
1632 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_encoder_dpms_avivo()
1634 if (rdev->mode_info.bl_encoder) { radeon_atom_encoder_dpms_avivo()
1640 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_encoder_dpms_avivo()
1648 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_encoder_dpms_avivo()
1651 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); radeon_atom_encoder_dpms_avivo()
1661 struct radeon_device *rdev = dev->dev_private; radeon_atom_encoder_dpms_dig() local
1676 !ASIC_IS_DCE5(rdev)) radeon_atom_encoder_dpms_dig()
1682 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) { radeon_atom_encoder_dpms_dig()
1694 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE61(rdev)) radeon_atom_encoder_dpms_dig()
1698 } else if (ASIC_IS_DCE4(rdev)) { radeon_atom_encoder_dpms_dig()
1718 if (ASIC_IS_DCE4(rdev)) radeon_atom_encoder_dpms_dig()
1722 if (rdev->mode_info.bl_encoder) radeon_atom_encoder_dpms_dig()
1739 if (ASIC_IS_DCE4(rdev)) { radeon_atom_encoder_dpms_dig()
1752 if (ASIC_IS_DCE4(rdev)) { radeon_atom_encoder_dpms_dig()
1779 struct radeon_device *rdev = dev->dev_private; radeon_atom_encoder_dpms() local
1811 if (ASIC_IS_DCE5(rdev)) { radeon_atom_encoder_dpms()
1822 } else if (ASIC_IS_DCE3(rdev)) radeon_atom_encoder_dpms()
1829 if (ASIC_IS_DCE5(rdev)) { radeon_atom_encoder_dpms()
1860 struct radeon_device *rdev = dev->dev_private; atombios_set_encoder_crtc_source() local
1870 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) atombios_set_encoder_crtc_source()
1878 if (ASIC_IS_AVIVO(rdev)) atombios_set_encoder_crtc_source()
1999 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_set_encoder_crtc_source()
2009 struct radeon_device *rdev = dev->dev_private; atombios_set_mst_encoder_crtc_source() local
2017 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) atombios_set_mst_encoder_crtc_source()
2049 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_set_mst_encoder_crtc_source()
2057 struct radeon_device *rdev = dev->dev_private; atombios_apply_encoder_quirks() local
2076 if (ASIC_IS_AVIVO(rdev) && atombios_apply_encoder_quirks()
2078 if (ASIC_IS_DCE8(rdev)) { atombios_apply_encoder_quirks()
2084 } else if (ASIC_IS_DCE4(rdev)) { atombios_apply_encoder_quirks()
2100 void radeon_atom_release_dig_encoder(struct radeon_device *rdev, int enc_idx) radeon_atom_release_dig_encoder() argument
2104 rdev->mode_info.active_encoders &= ~(1 << enc_idx); radeon_atom_release_dig_encoder()
2110 struct radeon_device *rdev = dev->dev_private; radeon_atom_pick_dig_encoder() local
2122 if (ASIC_IS_DCE6(rdev)) { radeon_atom_pick_dig_encoder()
2148 } else if (ASIC_IS_DCE4(rdev)) { radeon_atom_pick_dig_encoder()
2150 if (ASIC_IS_DCE41(rdev) && !ASIC_IS_DCE61(rdev)) { radeon_atom_pick_dig_encoder()
2152 if (rdev->family == CHIP_PALM) { radeon_atom_pick_dig_encoder()
2186 if (ASIC_IS_DCE32(rdev)) { radeon_atom_pick_dig_encoder()
2222 if (rdev->mode_info.active_encoders & (1 << enc_idx)) { radeon_atom_pick_dig_encoder()
2225 rdev->mode_info.active_encoders |= (1 << enc_idx); radeon_atom_pick_dig_encoder()
2231 radeon_atom_encoder_init(struct radeon_device *rdev) radeon_atom_encoder_init() argument
2233 struct drm_device *dev = rdev->ddev; radeon_atom_encoder_init()
2252 if (ext_encoder && (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE61(rdev))) radeon_atom_encoder_init()
2264 struct radeon_device *rdev = dev->dev_private; radeon_atom_encoder_mode_set() local
2274 if (ASIC_IS_AVIVO(rdev) && !ASIC_IS_DCE4(rdev)) { radeon_atom_encoder_mode_set()
2327 struct radeon_device *rdev = dev->dev_private; atombios_dac_load_detect() local
2340 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) atombios_dac_load_detect()
2365 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); atombios_dac_load_detect()
2376 struct radeon_device *rdev = dev->dev_private; radeon_atom_dac_detect() local
2386 if (rdev->family >= CHIP_R600) radeon_atom_dac_detect()
2417 struct radeon_device *rdev = dev->dev_private; radeon_atom_dig_detect() local
2423 if (!ASIC_IS_DCE4(rdev)) radeon_atom_dig_detect()
2474 struct radeon_device *rdev = encoder->dev->dev_private; radeon_atom_encoder_prepare() local
2485 radeon_atom_release_dig_encoder(rdev, dig->dig_encoder); radeon_atom_encoder_prepare()
2488 if (rdev->family >= CHIP_R600) radeon_atom_encoder_prepare()
2489 dig->afmt = rdev->mode_info.afmt[dig->dig_encoder]; radeon_atom_encoder_prepare()
2492 dig->afmt = rdev->mode_info.afmt[0]; radeon_atom_encoder_prepare()
2515 if (ASIC_IS_DCE8(rdev)) radeon_atom_encoder_prepare()
2517 else if (ASIC_IS_DCE4(rdev)) radeon_atom_encoder_prepare()
2519 else if (ASIC_IS_DCE3(rdev)) radeon_atom_encoder_prepare()
2521 else if (ASIC_IS_AVIVO(rdev)) radeon_atom_encoder_prepare()
2535 struct radeon_device *rdev = dev->dev_private; radeon_atom_encoder_disable() local
2543 if (!ASIC_IS_DCE3(rdev)) { radeon_atom_encoder_disable()
2589 if (rdev->asic->display.hdmi_enable) radeon_atom_encoder_disable()
2590 radeon_hdmi_enable(rdev, encoder, false); radeon_atom_encoder_disable()
2594 radeon_atom_release_dig_encoder(rdev, dig->dig_encoder); radeon_atom_encoder_disable()
2686 struct radeon_device *rdev = dev->dev_private; radeon_atombios_set_dac_info() local
2692 dac->tv_std = radeon_atombios_get_tv_info(rdev); radeon_atombios_set_dac_info()
2723 struct radeon_device *rdev = dev->dev_private; radeon_add_atom_encoder() local
2743 switch (rdev->num_crtc) { radeon_add_atom_encoder()
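
The atombios_encoders.c hits above all funnel through one sequence: fetch rdev from dev->dev_private, ask atom_parse_cmd_header() for the table's frev/crev revision, fill a revision-specific argument struct, and hand it to atom_execute_table(). Below is a minimal user-space sketch of that shape; the atom_* stubs and dac_encoder_args are simplified stand-ins, not the real radeon API.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct atom_context { int unused; };

/* stubs standing in for the real AtomBIOS interpreter entry points */
static bool atom_parse_cmd_header(struct atom_context *ctx, int index,
                                  uint8_t *frev, uint8_t *crev)
{
        (void)ctx; (void)index;
        *frev = 1;
        *crev = 2;              /* pretend the BIOS reports table v1.2 */
        return true;
}

static void atom_execute_table(struct atom_context *ctx, int index,
                               uint32_t *args)
{
        (void)ctx;
        printf("execute table %d, arg word 0 = 0x%x\n",
               index, (unsigned)args[0]);
}

/* invented argument struct; the real ones are versioned by frev/crev */
struct dac_encoder_args { uint32_t action; };

static void dac_setup(struct atom_context *ctx, int index, uint32_t action)
{
        struct dac_encoder_args args = { 0 };
        uint8_t frev, crev;

        if (!atom_parse_cmd_header(ctx, index, &frev, &crev))
                return;         /* table absent or unreadable: bail out */

        args.action = action;   /* frev/crev would select the layout here */
        atom_execute_table(ctx, index, (uint32_t *)&args);
}

int main(void)
{
        struct atom_context ctx = { 0 };

        dac_setup(&ctx, 2, 1);
        return 0;
}

The frev/crev pair is what lets one driver binary serve many BIOS revisions: the caller keeps one argument layout per revision and switches on the parsed version before filling it, which is what the larger setup routines above do.
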
H A Dradeon_i2c.c93 struct radeon_device *rdev = i2c->dev->dev_private; pre_xfer() local
104 if ((rdev->family >= CHIP_R200) && !ASIC_IS_AVIVO(rdev)) { pre_xfer()
107 if (rdev->family >= CHIP_RV350) pre_xfer()
109 else if ((rdev->family == CHIP_R300) || pre_xfer()
110 (rdev->family == CHIP_R350)) pre_xfer()
115 mutex_lock(&rdev->dc_hw_i2c_mutex); pre_xfer()
123 mutex_unlock(&rdev->dc_hw_i2c_mutex); pre_xfer()
128 if (ASIC_IS_DCE3(rdev) && rec->hw_capable) { pre_xfer()
163 struct radeon_device *rdev = i2c->dev->dev_private; post_xfer() local
182 struct radeon_device *rdev = i2c->dev->dev_private; get_clock() local
197 struct radeon_device *rdev = i2c->dev->dev_private; get_data() local
211 struct radeon_device *rdev = i2c->dev->dev_private; set_clock() local
224 struct radeon_device *rdev = i2c->dev->dev_private; set_data() local
236 static u32 radeon_get_i2c_prescale(struct radeon_device *rdev) radeon_get_i2c_prescale() argument
238 u32 sclk = rdev->pm.current_sclk; radeon_get_i2c_prescale()
244 switch (rdev->family) { radeon_get_i2c_prescale()
287 if (rdev->family == CHIP_R520) radeon_get_i2c_prescale()
330 struct radeon_device *rdev = i2c->dev->dev_private; r100_hw_i2c_xfer() local
338 mutex_lock(&rdev->dc_hw_i2c_mutex); r100_hw_i2c_xfer()
340 mutex_lock(&rdev->pm.mutex); r100_hw_i2c_xfer()
342 prescale = radeon_get_i2c_prescale(rdev); r100_hw_i2c_xfer()
350 if (rdev->is_atom_bios) { r100_hw_i2c_xfer()
364 switch (rdev->family) { r100_hw_i2c_xfer()
564 if (rdev->is_atom_bios) { r100_hw_i2c_xfer()
570 mutex_unlock(&rdev->pm.mutex); r100_hw_i2c_xfer()
571 mutex_unlock(&rdev->dc_hw_i2c_mutex); r100_hw_i2c_xfer()
583 struct radeon_device *rdev = i2c->dev->dev_private; r500_hw_i2c_xfer() local
591 mutex_lock(&rdev->dc_hw_i2c_mutex); r500_hw_i2c_xfer()
593 mutex_lock(&rdev->pm.mutex); r500_hw_i2c_xfer()
595 prescale = radeon_get_i2c_prescale(rdev); r500_hw_i2c_xfer()
806 mutex_unlock(&rdev->pm.mutex); r500_hw_i2c_xfer()
807 mutex_unlock(&rdev->dc_hw_i2c_mutex); r500_hw_i2c_xfer()
816 struct radeon_device *rdev = i2c->dev->dev_private; radeon_hw_i2c_xfer() local
822 switch (rdev->family) { radeon_hw_i2c_xfer()
912 struct radeon_device *rdev = dev->dev_private; radeon_i2c_create() local
934 ((rdev->family <= CHIP_RS480) || radeon_i2c_create()
935 ((rdev->family >= CHIP_RV515) && (rdev->family <= CHIP_R580))))) { radeon_i2c_create()
947 ASIC_IS_DCE3(rdev)) { radeon_i2c_create()
996 void radeon_i2c_init(struct radeon_device *rdev) radeon_i2c_init() argument
1001 if (rdev->is_atom_bios) radeon_i2c_init()
1002 radeon_atombios_i2c_init(rdev); radeon_i2c_init()
1004 radeon_combios_i2c_init(rdev); radeon_i2c_init()
1008 void radeon_i2c_fini(struct radeon_device *rdev) radeon_i2c_fini() argument
1013 if (rdev->i2c_bus[i]) { radeon_i2c_fini()
1014 radeon_i2c_destroy(rdev->i2c_bus[i]); radeon_i2c_fini()
1015 rdev->i2c_bus[i] = NULL; radeon_i2c_fini()
1021 void radeon_i2c_add(struct radeon_device *rdev, radeon_i2c_add() argument
1025 struct drm_device *dev = rdev->ddev; radeon_i2c_add()
1029 if (!rdev->i2c_bus[i]) { radeon_i2c_add()
1030 rdev->i2c_bus[i] = radeon_i2c_create(dev, rec, name); radeon_i2c_add()
1037 struct radeon_i2c_chan *radeon_i2c_lookup(struct radeon_device *rdev, radeon_i2c_lookup() argument
1043 if (rdev->i2c_bus[i] && radeon_i2c_lookup()
1044 (rdev->i2c_bus[i]->rec.i2c_id == i2c_bus->i2c_id)) { radeon_i2c_lookup()
1045 return rdev->i2c_bus[i]; radeon_i2c_lookup()
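
radeon_i2c_add(), radeon_i2c_lookup() and radeon_i2c_fini() above manage a fixed-size array of bus pointers: add claims the first free slot, lookup matches on the record's i2c_id, and fini frees and NULLs every slot. A self-contained model of that registry follows; MAX_BUSES and every name are illustrative, not the driver's.

#include <stdio.h>
#include <stdlib.h>

#define MAX_BUSES 16            /* illustrative cap on bus slots */

struct i2c_chan { int i2c_id; };

static struct i2c_chan *buses[MAX_BUSES];

static void bus_add(int id)
{
        for (int i = 0; i < MAX_BUSES; i++) {
                if (!buses[i]) {
                        buses[i] = calloc(1, sizeof(*buses[i]));
                        if (buses[i])
                                buses[i]->i2c_id = id;
                        return; /* first free slot wins */
                }
        }
}

static struct i2c_chan *bus_lookup(int id)
{
        for (int i = 0; i < MAX_BUSES; i++)
                if (buses[i] && buses[i]->i2c_id == id)
                        return buses[i];
        return NULL;
}

static void bus_fini(void)
{
        for (int i = 0; i < MAX_BUSES; i++) {
                free(buses[i]);
                buses[i] = NULL;        /* mirror radeon_i2c_fini()'s NULLing */
        }
}

int main(void)
{
        bus_add(3);
        printf("lookup(3): %s\n", bus_lookup(3) ? "hit" : "miss");
        bus_fini();
        return 0;
}
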
/linux-4.4.14/net/wireless/
H A Drdev-ops.h9 static inline int rdev_suspend(struct cfg80211_registered_device *rdev, rdev_suspend() argument
13 trace_rdev_suspend(&rdev->wiphy, wowlan); rdev_suspend()
14 ret = rdev->ops->suspend(&rdev->wiphy, wowlan); rdev_suspend()
15 trace_rdev_return_int(&rdev->wiphy, ret); rdev_suspend()
19 static inline int rdev_resume(struct cfg80211_registered_device *rdev) rdev_resume() argument
22 trace_rdev_resume(&rdev->wiphy); rdev_resume()
23 ret = rdev->ops->resume(&rdev->wiphy); rdev_resume()
24 trace_rdev_return_int(&rdev->wiphy, ret); rdev_resume()
28 static inline void rdev_set_wakeup(struct cfg80211_registered_device *rdev, rdev_set_wakeup() argument
31 trace_rdev_set_wakeup(&rdev->wiphy, enabled); rdev_set_wakeup()
32 rdev->ops->set_wakeup(&rdev->wiphy, enabled); rdev_set_wakeup()
33 trace_rdev_return_void(&rdev->wiphy); rdev_set_wakeup()
37 *rdev_add_virtual_intf(struct cfg80211_registered_device *rdev, char *name, rdev_add_virtual_intf() argument
43 trace_rdev_add_virtual_intf(&rdev->wiphy, name, type); rdev_add_virtual_intf()
44 ret = rdev->ops->add_virtual_intf(&rdev->wiphy, name, name_assign_type, rdev_add_virtual_intf()
46 trace_rdev_return_wdev(&rdev->wiphy, ret); rdev_add_virtual_intf()
51 rdev_del_virtual_intf(struct cfg80211_registered_device *rdev, rdev_del_virtual_intf() argument
55 trace_rdev_del_virtual_intf(&rdev->wiphy, wdev); rdev_del_virtual_intf()
56 ret = rdev->ops->del_virtual_intf(&rdev->wiphy, wdev); rdev_del_virtual_intf()
57 trace_rdev_return_int(&rdev->wiphy, ret); rdev_del_virtual_intf()
62 rdev_change_virtual_intf(struct cfg80211_registered_device *rdev, rdev_change_virtual_intf() argument
67 trace_rdev_change_virtual_intf(&rdev->wiphy, dev, type); rdev_change_virtual_intf()
68 ret = rdev->ops->change_virtual_intf(&rdev->wiphy, dev, type, flags, rdev_change_virtual_intf()
70 trace_rdev_return_int(&rdev->wiphy, ret); rdev_change_virtual_intf()
74 static inline int rdev_add_key(struct cfg80211_registered_device *rdev, rdev_add_key() argument
80 trace_rdev_add_key(&rdev->wiphy, netdev, key_index, pairwise, mac_addr); rdev_add_key()
81 ret = rdev->ops->add_key(&rdev->wiphy, netdev, key_index, pairwise, rdev_add_key()
83 trace_rdev_return_int(&rdev->wiphy, ret); rdev_add_key()
88 rdev_get_key(struct cfg80211_registered_device *rdev, struct net_device *netdev, rdev_get_key() argument
93 trace_rdev_get_key(&rdev->wiphy, netdev, key_index, pairwise, mac_addr); rdev_get_key()
94 ret = rdev->ops->get_key(&rdev->wiphy, netdev, key_index, pairwise, rdev_get_key()
96 trace_rdev_return_int(&rdev->wiphy, ret); rdev_get_key()
100 static inline int rdev_del_key(struct cfg80211_registered_device *rdev, rdev_del_key() argument
105 trace_rdev_del_key(&rdev->wiphy, netdev, key_index, pairwise, mac_addr); rdev_del_key()
106 ret = rdev->ops->del_key(&rdev->wiphy, netdev, key_index, pairwise, rdev_del_key()
108 trace_rdev_return_int(&rdev->wiphy, ret); rdev_del_key()
113 rdev_set_default_key(struct cfg80211_registered_device *rdev, rdev_set_default_key() argument
118 trace_rdev_set_default_key(&rdev->wiphy, netdev, key_index, rdev_set_default_key()
120 ret = rdev->ops->set_default_key(&rdev->wiphy, netdev, key_index, rdev_set_default_key()
122 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_default_key()
127 rdev_set_default_mgmt_key(struct cfg80211_registered_device *rdev, rdev_set_default_mgmt_key() argument
131 trace_rdev_set_default_mgmt_key(&rdev->wiphy, netdev, key_index); rdev_set_default_mgmt_key()
132 ret = rdev->ops->set_default_mgmt_key(&rdev->wiphy, netdev, rdev_set_default_mgmt_key()
134 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_default_mgmt_key()
138 static inline int rdev_start_ap(struct cfg80211_registered_device *rdev, rdev_start_ap() argument
143 trace_rdev_start_ap(&rdev->wiphy, dev, settings); rdev_start_ap()
144 ret = rdev->ops->start_ap(&rdev->wiphy, dev, settings); rdev_start_ap()
145 trace_rdev_return_int(&rdev->wiphy, ret); rdev_start_ap()
149 static inline int rdev_change_beacon(struct cfg80211_registered_device *rdev, rdev_change_beacon() argument
154 trace_rdev_change_beacon(&rdev->wiphy, dev, info); rdev_change_beacon()
155 ret = rdev->ops->change_beacon(&rdev->wiphy, dev, info); rdev_change_beacon()
156 trace_rdev_return_int(&rdev->wiphy, ret); rdev_change_beacon()
160 static inline int rdev_stop_ap(struct cfg80211_registered_device *rdev, rdev_stop_ap() argument
164 trace_rdev_stop_ap(&rdev->wiphy, dev); rdev_stop_ap()
165 ret = rdev->ops->stop_ap(&rdev->wiphy, dev); rdev_stop_ap()
166 trace_rdev_return_int(&rdev->wiphy, ret); rdev_stop_ap()
170 static inline int rdev_add_station(struct cfg80211_registered_device *rdev, rdev_add_station() argument
175 trace_rdev_add_station(&rdev->wiphy, dev, mac, params); rdev_add_station()
176 ret = rdev->ops->add_station(&rdev->wiphy, dev, mac, params); rdev_add_station()
177 trace_rdev_return_int(&rdev->wiphy, ret); rdev_add_station()
181 static inline int rdev_del_station(struct cfg80211_registered_device *rdev, rdev_del_station() argument
186 trace_rdev_del_station(&rdev->wiphy, dev, params); rdev_del_station()
187 ret = rdev->ops->del_station(&rdev->wiphy, dev, params); rdev_del_station()
188 trace_rdev_return_int(&rdev->wiphy, ret); rdev_del_station()
192 static inline int rdev_change_station(struct cfg80211_registered_device *rdev, rdev_change_station() argument
197 trace_rdev_change_station(&rdev->wiphy, dev, mac, params); rdev_change_station()
198 ret = rdev->ops->change_station(&rdev->wiphy, dev, mac, params); rdev_change_station()
199 trace_rdev_return_int(&rdev->wiphy, ret); rdev_change_station()
203 static inline int rdev_get_station(struct cfg80211_registered_device *rdev, rdev_get_station() argument
208 trace_rdev_get_station(&rdev->wiphy, dev, mac); rdev_get_station()
209 ret = rdev->ops->get_station(&rdev->wiphy, dev, mac, sinfo); rdev_get_station()
210 trace_rdev_return_int_station_info(&rdev->wiphy, ret, sinfo); rdev_get_station()
214 static inline int rdev_dump_station(struct cfg80211_registered_device *rdev, rdev_dump_station() argument
219 trace_rdev_dump_station(&rdev->wiphy, dev, idx, mac); rdev_dump_station()
220 ret = rdev->ops->dump_station(&rdev->wiphy, dev, idx, mac, sinfo); rdev_dump_station()
221 trace_rdev_return_int_station_info(&rdev->wiphy, ret, sinfo); rdev_dump_station()
225 static inline int rdev_add_mpath(struct cfg80211_registered_device *rdev, rdev_add_mpath() argument
229 trace_rdev_add_mpath(&rdev->wiphy, dev, dst, next_hop); rdev_add_mpath()
230 ret = rdev->ops->add_mpath(&rdev->wiphy, dev, dst, next_hop); rdev_add_mpath()
231 trace_rdev_return_int(&rdev->wiphy, ret); rdev_add_mpath()
235 static inline int rdev_del_mpath(struct cfg80211_registered_device *rdev, rdev_del_mpath() argument
239 trace_rdev_del_mpath(&rdev->wiphy, dev, dst); rdev_del_mpath()
240 ret = rdev->ops->del_mpath(&rdev->wiphy, dev, dst); rdev_del_mpath()
241 trace_rdev_return_int(&rdev->wiphy, ret); rdev_del_mpath()
245 static inline int rdev_change_mpath(struct cfg80211_registered_device *rdev, rdev_change_mpath() argument
250 trace_rdev_change_mpath(&rdev->wiphy, dev, dst, next_hop); rdev_change_mpath()
251 ret = rdev->ops->change_mpath(&rdev->wiphy, dev, dst, next_hop); rdev_change_mpath()
252 trace_rdev_return_int(&rdev->wiphy, ret); rdev_change_mpath()
256 static inline int rdev_get_mpath(struct cfg80211_registered_device *rdev, rdev_get_mpath() argument
261 trace_rdev_get_mpath(&rdev->wiphy, dev, dst, next_hop); rdev_get_mpath()
262 ret = rdev->ops->get_mpath(&rdev->wiphy, dev, dst, next_hop, pinfo); rdev_get_mpath()
263 trace_rdev_return_int_mpath_info(&rdev->wiphy, ret, pinfo); rdev_get_mpath()
268 static inline int rdev_get_mpp(struct cfg80211_registered_device *rdev, rdev_get_mpp() argument
274 trace_rdev_get_mpp(&rdev->wiphy, dev, dst, mpp); rdev_get_mpp()
275 ret = rdev->ops->get_mpp(&rdev->wiphy, dev, dst, mpp, pinfo); rdev_get_mpp()
276 trace_rdev_return_int_mpath_info(&rdev->wiphy, ret, pinfo); rdev_get_mpp()
280 static inline int rdev_dump_mpath(struct cfg80211_registered_device *rdev, rdev_dump_mpath() argument
286 trace_rdev_dump_mpath(&rdev->wiphy, dev, idx, dst, next_hop); rdev_dump_mpath()
287 ret = rdev->ops->dump_mpath(&rdev->wiphy, dev, idx, dst, next_hop, rdev_dump_mpath()
289 trace_rdev_return_int_mpath_info(&rdev->wiphy, ret, pinfo); rdev_dump_mpath()
293 static inline int rdev_dump_mpp(struct cfg80211_registered_device *rdev, rdev_dump_mpp() argument
300 trace_rdev_dump_mpp(&rdev->wiphy, dev, idx, dst, mpp); rdev_dump_mpp()
301 ret = rdev->ops->dump_mpp(&rdev->wiphy, dev, idx, dst, mpp, pinfo); rdev_dump_mpp()
302 trace_rdev_return_int_mpath_info(&rdev->wiphy, ret, pinfo); rdev_dump_mpp()
307 rdev_get_mesh_config(struct cfg80211_registered_device *rdev, rdev_get_mesh_config() argument
311 trace_rdev_get_mesh_config(&rdev->wiphy, dev); rdev_get_mesh_config()
312 ret = rdev->ops->get_mesh_config(&rdev->wiphy, dev, conf); rdev_get_mesh_config()
313 trace_rdev_return_int_mesh_config(&rdev->wiphy, ret, conf); rdev_get_mesh_config()
318 rdev_update_mesh_config(struct cfg80211_registered_device *rdev, rdev_update_mesh_config() argument
323 trace_rdev_update_mesh_config(&rdev->wiphy, dev, mask, nconf); rdev_update_mesh_config()
324 ret = rdev->ops->update_mesh_config(&rdev->wiphy, dev, mask, nconf); rdev_update_mesh_config()
325 trace_rdev_return_int(&rdev->wiphy, ret); rdev_update_mesh_config()
329 static inline int rdev_join_mesh(struct cfg80211_registered_device *rdev, rdev_join_mesh() argument
335 trace_rdev_join_mesh(&rdev->wiphy, dev, conf, setup); rdev_join_mesh()
336 ret = rdev->ops->join_mesh(&rdev->wiphy, dev, conf, setup); rdev_join_mesh()
337 trace_rdev_return_int(&rdev->wiphy, ret); rdev_join_mesh()
342 static inline int rdev_leave_mesh(struct cfg80211_registered_device *rdev, rdev_leave_mesh() argument
346 trace_rdev_leave_mesh(&rdev->wiphy, dev); rdev_leave_mesh()
347 ret = rdev->ops->leave_mesh(&rdev->wiphy, dev); rdev_leave_mesh()
348 trace_rdev_return_int(&rdev->wiphy, ret); rdev_leave_mesh()
352 static inline int rdev_join_ocb(struct cfg80211_registered_device *rdev, rdev_join_ocb() argument
357 trace_rdev_join_ocb(&rdev->wiphy, dev, setup); rdev_join_ocb()
358 ret = rdev->ops->join_ocb(&rdev->wiphy, dev, setup); rdev_join_ocb()
359 trace_rdev_return_int(&rdev->wiphy, ret); rdev_join_ocb()
363 static inline int rdev_leave_ocb(struct cfg80211_registered_device *rdev, rdev_leave_ocb() argument
367 trace_rdev_leave_ocb(&rdev->wiphy, dev); rdev_leave_ocb()
368 ret = rdev->ops->leave_ocb(&rdev->wiphy, dev); rdev_leave_ocb()
369 trace_rdev_return_int(&rdev->wiphy, ret); rdev_leave_ocb()
373 static inline int rdev_change_bss(struct cfg80211_registered_device *rdev, rdev_change_bss() argument
379 trace_rdev_change_bss(&rdev->wiphy, dev, params); rdev_change_bss()
380 ret = rdev->ops->change_bss(&rdev->wiphy, dev, params); rdev_change_bss()
381 trace_rdev_return_int(&rdev->wiphy, ret); rdev_change_bss()
385 static inline int rdev_set_txq_params(struct cfg80211_registered_device *rdev, rdev_set_txq_params() argument
391 trace_rdev_set_txq_params(&rdev->wiphy, dev, params); rdev_set_txq_params()
392 ret = rdev->ops->set_txq_params(&rdev->wiphy, dev, params); rdev_set_txq_params()
393 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_txq_params()
398 rdev_libertas_set_mesh_channel(struct cfg80211_registered_device *rdev, rdev_libertas_set_mesh_channel() argument
403 trace_rdev_libertas_set_mesh_channel(&rdev->wiphy, dev, chan); rdev_libertas_set_mesh_channel()
404 ret = rdev->ops->libertas_set_mesh_channel(&rdev->wiphy, dev, chan); rdev_libertas_set_mesh_channel()
405 trace_rdev_return_int(&rdev->wiphy, ret); rdev_libertas_set_mesh_channel()
410 rdev_set_monitor_channel(struct cfg80211_registered_device *rdev, rdev_set_monitor_channel() argument
414 trace_rdev_set_monitor_channel(&rdev->wiphy, chandef); rdev_set_monitor_channel()
415 ret = rdev->ops->set_monitor_channel(&rdev->wiphy, chandef); rdev_set_monitor_channel()
416 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_monitor_channel()
420 static inline int rdev_scan(struct cfg80211_registered_device *rdev, rdev_scan() argument
424 trace_rdev_scan(&rdev->wiphy, request); rdev_scan()
425 ret = rdev->ops->scan(&rdev->wiphy, request); rdev_scan()
426 trace_rdev_return_int(&rdev->wiphy, ret); rdev_scan()
430 static inline int rdev_auth(struct cfg80211_registered_device *rdev, rdev_auth() argument
435 trace_rdev_auth(&rdev->wiphy, dev, req); rdev_auth()
436 ret = rdev->ops->auth(&rdev->wiphy, dev, req); rdev_auth()
437 trace_rdev_return_int(&rdev->wiphy, ret); rdev_auth()
441 static inline int rdev_assoc(struct cfg80211_registered_device *rdev, rdev_assoc() argument
446 trace_rdev_assoc(&rdev->wiphy, dev, req); rdev_assoc()
447 ret = rdev->ops->assoc(&rdev->wiphy, dev, req); rdev_assoc()
448 trace_rdev_return_int(&rdev->wiphy, ret); rdev_assoc()
452 static inline int rdev_deauth(struct cfg80211_registered_device *rdev, rdev_deauth() argument
457 trace_rdev_deauth(&rdev->wiphy, dev, req); rdev_deauth()
458 ret = rdev->ops->deauth(&rdev->wiphy, dev, req); rdev_deauth()
459 trace_rdev_return_int(&rdev->wiphy, ret); rdev_deauth()
463 static inline int rdev_disassoc(struct cfg80211_registered_device *rdev, rdev_disassoc() argument
468 trace_rdev_disassoc(&rdev->wiphy, dev, req); rdev_disassoc()
469 ret = rdev->ops->disassoc(&rdev->wiphy, dev, req); rdev_disassoc()
470 trace_rdev_return_int(&rdev->wiphy, ret); rdev_disassoc()
474 static inline int rdev_connect(struct cfg80211_registered_device *rdev, rdev_connect() argument
479 trace_rdev_connect(&rdev->wiphy, dev, sme); rdev_connect()
480 ret = rdev->ops->connect(&rdev->wiphy, dev, sme); rdev_connect()
481 trace_rdev_return_int(&rdev->wiphy, ret); rdev_connect()
485 static inline int rdev_disconnect(struct cfg80211_registered_device *rdev, rdev_disconnect() argument
489 trace_rdev_disconnect(&rdev->wiphy, dev, reason_code); rdev_disconnect()
490 ret = rdev->ops->disconnect(&rdev->wiphy, dev, reason_code); rdev_disconnect()
491 trace_rdev_return_int(&rdev->wiphy, ret); rdev_disconnect()
495 static inline int rdev_join_ibss(struct cfg80211_registered_device *rdev, rdev_join_ibss() argument
500 trace_rdev_join_ibss(&rdev->wiphy, dev, params); rdev_join_ibss()
501 ret = rdev->ops->join_ibss(&rdev->wiphy, dev, params); rdev_join_ibss()
502 trace_rdev_return_int(&rdev->wiphy, ret); rdev_join_ibss()
506 static inline int rdev_leave_ibss(struct cfg80211_registered_device *rdev, rdev_leave_ibss() argument
510 trace_rdev_leave_ibss(&rdev->wiphy, dev); rdev_leave_ibss()
511 ret = rdev->ops->leave_ibss(&rdev->wiphy, dev); rdev_leave_ibss()
512 trace_rdev_return_int(&rdev->wiphy, ret); rdev_leave_ibss()
517 rdev_set_wiphy_params(struct cfg80211_registered_device *rdev, u32 changed) rdev_set_wiphy_params() argument
520 trace_rdev_set_wiphy_params(&rdev->wiphy, changed); rdev_set_wiphy_params()
521 ret = rdev->ops->set_wiphy_params(&rdev->wiphy, changed); rdev_set_wiphy_params()
522 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_wiphy_params()
526 static inline int rdev_set_tx_power(struct cfg80211_registered_device *rdev, rdev_set_tx_power() argument
531 trace_rdev_set_tx_power(&rdev->wiphy, wdev, type, mbm); rdev_set_tx_power()
532 ret = rdev->ops->set_tx_power(&rdev->wiphy, wdev, type, mbm); rdev_set_tx_power()
533 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_tx_power()
537 static inline int rdev_get_tx_power(struct cfg80211_registered_device *rdev, rdev_get_tx_power() argument
541 trace_rdev_get_tx_power(&rdev->wiphy, wdev); rdev_get_tx_power()
542 ret = rdev->ops->get_tx_power(&rdev->wiphy, wdev, dbm); rdev_get_tx_power()
543 trace_rdev_return_int_int(&rdev->wiphy, ret, *dbm); rdev_get_tx_power()
547 static inline int rdev_set_wds_peer(struct cfg80211_registered_device *rdev, rdev_set_wds_peer() argument
551 trace_rdev_set_wds_peer(&rdev->wiphy, dev, addr); rdev_set_wds_peer()
552 ret = rdev->ops->set_wds_peer(&rdev->wiphy, dev, addr); rdev_set_wds_peer()
553 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_wds_peer()
557 static inline void rdev_rfkill_poll(struct cfg80211_registered_device *rdev) rdev_rfkill_poll() argument
559 trace_rdev_rfkill_poll(&rdev->wiphy); rdev_rfkill_poll()
560 rdev->ops->rfkill_poll(&rdev->wiphy); rdev_rfkill_poll()
561 trace_rdev_return_void(&rdev->wiphy); rdev_rfkill_poll()
566 static inline int rdev_testmode_cmd(struct cfg80211_registered_device *rdev, rdev_testmode_cmd() argument
571 trace_rdev_testmode_cmd(&rdev->wiphy, wdev); rdev_testmode_cmd()
572 ret = rdev->ops->testmode_cmd(&rdev->wiphy, wdev, data, len); rdev_testmode_cmd()
573 trace_rdev_return_int(&rdev->wiphy, ret); rdev_testmode_cmd()
577 static inline int rdev_testmode_dump(struct cfg80211_registered_device *rdev, rdev_testmode_dump() argument
583 trace_rdev_testmode_dump(&rdev->wiphy); rdev_testmode_dump()
584 ret = rdev->ops->testmode_dump(&rdev->wiphy, skb, cb, data, len); rdev_testmode_dump()
585 trace_rdev_return_int(&rdev->wiphy, ret); rdev_testmode_dump()
591 rdev_set_bitrate_mask(struct cfg80211_registered_device *rdev, rdev_set_bitrate_mask() argument
596 trace_rdev_set_bitrate_mask(&rdev->wiphy, dev, peer, mask); rdev_set_bitrate_mask()
597 ret = rdev->ops->set_bitrate_mask(&rdev->wiphy, dev, peer, mask); rdev_set_bitrate_mask()
598 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_bitrate_mask()
602 static inline int rdev_dump_survey(struct cfg80211_registered_device *rdev, rdev_dump_survey() argument
607 trace_rdev_dump_survey(&rdev->wiphy, netdev, idx); rdev_dump_survey()
608 ret = rdev->ops->dump_survey(&rdev->wiphy, netdev, idx, info); rdev_dump_survey()
610 trace_rdev_return_int(&rdev->wiphy, ret); rdev_dump_survey()
612 trace_rdev_return_int_survey_info(&rdev->wiphy, ret, info); rdev_dump_survey()
616 static inline int rdev_set_pmksa(struct cfg80211_registered_device *rdev, rdev_set_pmksa() argument
621 trace_rdev_set_pmksa(&rdev->wiphy, netdev, pmksa); rdev_set_pmksa()
622 ret = rdev->ops->set_pmksa(&rdev->wiphy, netdev, pmksa); rdev_set_pmksa()
623 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_pmksa()
627 static inline int rdev_del_pmksa(struct cfg80211_registered_device *rdev, rdev_del_pmksa() argument
632 trace_rdev_del_pmksa(&rdev->wiphy, netdev, pmksa); rdev_del_pmksa()
633 ret = rdev->ops->del_pmksa(&rdev->wiphy, netdev, pmksa); rdev_del_pmksa()
634 trace_rdev_return_int(&rdev->wiphy, ret); rdev_del_pmksa()
638 static inline int rdev_flush_pmksa(struct cfg80211_registered_device *rdev, rdev_flush_pmksa() argument
642 trace_rdev_flush_pmksa(&rdev->wiphy, netdev); rdev_flush_pmksa()
643 ret = rdev->ops->flush_pmksa(&rdev->wiphy, netdev); rdev_flush_pmksa()
644 trace_rdev_return_int(&rdev->wiphy, ret); rdev_flush_pmksa()
649 rdev_remain_on_channel(struct cfg80211_registered_device *rdev, rdev_remain_on_channel() argument
655 trace_rdev_remain_on_channel(&rdev->wiphy, wdev, chan, duration); rdev_remain_on_channel()
656 ret = rdev->ops->remain_on_channel(&rdev->wiphy, wdev, chan, rdev_remain_on_channel()
658 trace_rdev_return_int_cookie(&rdev->wiphy, ret, *cookie); rdev_remain_on_channel()
663 rdev_cancel_remain_on_channel(struct cfg80211_registered_device *rdev, rdev_cancel_remain_on_channel() argument
667 trace_rdev_cancel_remain_on_channel(&rdev->wiphy, wdev, cookie); rdev_cancel_remain_on_channel()
668 ret = rdev->ops->cancel_remain_on_channel(&rdev->wiphy, wdev, cookie); rdev_cancel_remain_on_channel()
669 trace_rdev_return_int(&rdev->wiphy, ret); rdev_cancel_remain_on_channel()
673 static inline int rdev_mgmt_tx(struct cfg80211_registered_device *rdev, rdev_mgmt_tx() argument
679 trace_rdev_mgmt_tx(&rdev->wiphy, wdev, params); rdev_mgmt_tx()
680 ret = rdev->ops->mgmt_tx(&rdev->wiphy, wdev, params, cookie); rdev_mgmt_tx()
681 trace_rdev_return_int_cookie(&rdev->wiphy, ret, *cookie); rdev_mgmt_tx()
686 rdev_mgmt_tx_cancel_wait(struct cfg80211_registered_device *rdev, rdev_mgmt_tx_cancel_wait() argument
690 trace_rdev_mgmt_tx_cancel_wait(&rdev->wiphy, wdev, cookie); rdev_mgmt_tx_cancel_wait()
691 ret = rdev->ops->mgmt_tx_cancel_wait(&rdev->wiphy, wdev, cookie); rdev_mgmt_tx_cancel_wait()
692 trace_rdev_return_int(&rdev->wiphy, ret); rdev_mgmt_tx_cancel_wait()
696 static inline int rdev_set_power_mgmt(struct cfg80211_registered_device *rdev, rdev_set_power_mgmt() argument
701 trace_rdev_set_power_mgmt(&rdev->wiphy, dev, enabled, timeout); rdev_set_power_mgmt()
702 ret = rdev->ops->set_power_mgmt(&rdev->wiphy, dev, enabled, timeout); rdev_set_power_mgmt()
703 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_power_mgmt()
708 rdev_set_cqm_rssi_config(struct cfg80211_registered_device *rdev, rdev_set_cqm_rssi_config() argument
712 trace_rdev_set_cqm_rssi_config(&rdev->wiphy, dev, rssi_thold, rdev_set_cqm_rssi_config()
714 ret = rdev->ops->set_cqm_rssi_config(&rdev->wiphy, dev, rssi_thold, rdev_set_cqm_rssi_config()
716 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_cqm_rssi_config()
721 rdev_set_cqm_txe_config(struct cfg80211_registered_device *rdev, rdev_set_cqm_txe_config() argument
725 trace_rdev_set_cqm_txe_config(&rdev->wiphy, dev, rate, pkts, intvl); rdev_set_cqm_txe_config()
726 ret = rdev->ops->set_cqm_txe_config(&rdev->wiphy, dev, rate, pkts, rdev_set_cqm_txe_config()
728 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_cqm_txe_config()
733 rdev_mgmt_frame_register(struct cfg80211_registered_device *rdev, rdev_mgmt_frame_register() argument
738 trace_rdev_mgmt_frame_register(&rdev->wiphy, wdev, frame_type, reg); rdev_mgmt_frame_register()
739 rdev->ops->mgmt_frame_register(&rdev->wiphy, wdev, frame_type, reg); rdev_mgmt_frame_register()
740 trace_rdev_return_void(&rdev->wiphy); rdev_mgmt_frame_register()
743 static inline int rdev_set_antenna(struct cfg80211_registered_device *rdev, rdev_set_antenna() argument
747 trace_rdev_set_antenna(&rdev->wiphy, tx_ant, rx_ant); rdev_set_antenna()
748 ret = rdev->ops->set_antenna(&rdev->wiphy, tx_ant, rx_ant); rdev_set_antenna()
749 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_antenna()
753 static inline int rdev_get_antenna(struct cfg80211_registered_device *rdev, rdev_get_antenna() argument
757 trace_rdev_get_antenna(&rdev->wiphy); rdev_get_antenna()
758 ret = rdev->ops->get_antenna(&rdev->wiphy, tx_ant, rx_ant); rdev_get_antenna()
760 trace_rdev_return_int(&rdev->wiphy, ret); rdev_get_antenna()
762 trace_rdev_return_int_tx_rx(&rdev->wiphy, ret, *tx_ant, rdev_get_antenna()
768 rdev_sched_scan_start(struct cfg80211_registered_device *rdev, rdev_sched_scan_start() argument
773 trace_rdev_sched_scan_start(&rdev->wiphy, dev, request); rdev_sched_scan_start()
774 ret = rdev->ops->sched_scan_start(&rdev->wiphy, dev, request); rdev_sched_scan_start()
775 trace_rdev_return_int(&rdev->wiphy, ret); rdev_sched_scan_start()
779 static inline int rdev_sched_scan_stop(struct cfg80211_registered_device *rdev, rdev_sched_scan_stop() argument
783 trace_rdev_sched_scan_stop(&rdev->wiphy, dev); rdev_sched_scan_stop()
784 ret = rdev->ops->sched_scan_stop(&rdev->wiphy, dev); rdev_sched_scan_stop()
785 trace_rdev_return_int(&rdev->wiphy, ret); rdev_sched_scan_stop()
789 static inline int rdev_set_rekey_data(struct cfg80211_registered_device *rdev, rdev_set_rekey_data() argument
794 trace_rdev_set_rekey_data(&rdev->wiphy, dev); rdev_set_rekey_data()
795 ret = rdev->ops->set_rekey_data(&rdev->wiphy, dev, data); rdev_set_rekey_data()
796 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_rekey_data()
800 static inline int rdev_tdls_mgmt(struct cfg80211_registered_device *rdev, rdev_tdls_mgmt() argument
807 trace_rdev_tdls_mgmt(&rdev->wiphy, dev, peer, action_code, rdev_tdls_mgmt()
810 ret = rdev->ops->tdls_mgmt(&rdev->wiphy, dev, peer, action_code, rdev_tdls_mgmt()
813 trace_rdev_return_int(&rdev->wiphy, ret); rdev_tdls_mgmt()
817 static inline int rdev_tdls_oper(struct cfg80211_registered_device *rdev, rdev_tdls_oper() argument
822 trace_rdev_tdls_oper(&rdev->wiphy, dev, peer, oper); rdev_tdls_oper()
823 ret = rdev->ops->tdls_oper(&rdev->wiphy, dev, peer, oper); rdev_tdls_oper()
824 trace_rdev_return_int(&rdev->wiphy, ret); rdev_tdls_oper()
828 static inline int rdev_probe_client(struct cfg80211_registered_device *rdev, rdev_probe_client() argument
833 trace_rdev_probe_client(&rdev->wiphy, dev, peer); rdev_probe_client()
834 ret = rdev->ops->probe_client(&rdev->wiphy, dev, peer, cookie); rdev_probe_client()
835 trace_rdev_return_int_cookie(&rdev->wiphy, ret, *cookie); rdev_probe_client()
839 static inline int rdev_set_noack_map(struct cfg80211_registered_device *rdev, rdev_set_noack_map() argument
843 trace_rdev_set_noack_map(&rdev->wiphy, dev, noack_map); rdev_set_noack_map()
844 ret = rdev->ops->set_noack_map(&rdev->wiphy, dev, noack_map); rdev_set_noack_map()
845 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_noack_map()
850 rdev_get_channel(struct cfg80211_registered_device *rdev, rdev_get_channel() argument
856 trace_rdev_get_channel(&rdev->wiphy, wdev); rdev_get_channel()
857 ret = rdev->ops->get_channel(&rdev->wiphy, wdev, chandef); rdev_get_channel()
858 trace_rdev_return_chandef(&rdev->wiphy, ret, chandef); rdev_get_channel()
863 static inline int rdev_start_p2p_device(struct cfg80211_registered_device *rdev, rdev_start_p2p_device() argument
868 trace_rdev_start_p2p_device(&rdev->wiphy, wdev); rdev_start_p2p_device()
869 ret = rdev->ops->start_p2p_device(&rdev->wiphy, wdev); rdev_start_p2p_device()
870 trace_rdev_return_int(&rdev->wiphy, ret); rdev_start_p2p_device()
874 static inline void rdev_stop_p2p_device(struct cfg80211_registered_device *rdev, rdev_stop_p2p_device() argument
877 trace_rdev_stop_p2p_device(&rdev->wiphy, wdev); rdev_stop_p2p_device()
878 rdev->ops->stop_p2p_device(&rdev->wiphy, wdev); rdev_stop_p2p_device()
879 trace_rdev_return_void(&rdev->wiphy); rdev_stop_p2p_device()
882 static inline int rdev_set_mac_acl(struct cfg80211_registered_device *rdev, rdev_set_mac_acl() argument
888 trace_rdev_set_mac_acl(&rdev->wiphy, dev, params); rdev_set_mac_acl()
889 ret = rdev->ops->set_mac_acl(&rdev->wiphy, dev, params); rdev_set_mac_acl()
890 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_mac_acl()
894 static inline int rdev_update_ft_ies(struct cfg80211_registered_device *rdev, rdev_update_ft_ies() argument
900 trace_rdev_update_ft_ies(&rdev->wiphy, dev, ftie); rdev_update_ft_ies()
901 ret = rdev->ops->update_ft_ies(&rdev->wiphy, dev, ftie); rdev_update_ft_ies()
902 trace_rdev_return_int(&rdev->wiphy, ret); rdev_update_ft_ies()
906 static inline int rdev_crit_proto_start(struct cfg80211_registered_device *rdev, rdev_crit_proto_start() argument
913 trace_rdev_crit_proto_start(&rdev->wiphy, wdev, protocol, duration); rdev_crit_proto_start()
914 ret = rdev->ops->crit_proto_start(&rdev->wiphy, wdev, rdev_crit_proto_start()
916 trace_rdev_return_int(&rdev->wiphy, ret); rdev_crit_proto_start()
920 static inline void rdev_crit_proto_stop(struct cfg80211_registered_device *rdev, rdev_crit_proto_stop() argument
923 trace_rdev_crit_proto_stop(&rdev->wiphy, wdev); rdev_crit_proto_stop()
924 rdev->ops->crit_proto_stop(&rdev->wiphy, wdev); rdev_crit_proto_stop()
925 trace_rdev_return_void(&rdev->wiphy); rdev_crit_proto_stop()
928 static inline int rdev_channel_switch(struct cfg80211_registered_device *rdev, rdev_channel_switch() argument
934 trace_rdev_channel_switch(&rdev->wiphy, dev, params); rdev_channel_switch()
935 ret = rdev->ops->channel_switch(&rdev->wiphy, dev, params); rdev_channel_switch()
936 trace_rdev_return_int(&rdev->wiphy, ret); rdev_channel_switch()
940 static inline int rdev_set_qos_map(struct cfg80211_registered_device *rdev, rdev_set_qos_map() argument
946 if (rdev->ops->set_qos_map) { rdev_set_qos_map()
947 trace_rdev_set_qos_map(&rdev->wiphy, dev, qos_map); rdev_set_qos_map()
948 ret = rdev->ops->set_qos_map(&rdev->wiphy, dev, qos_map); rdev_set_qos_map()
949 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_qos_map()
956 rdev_set_ap_chanwidth(struct cfg80211_registered_device *rdev, rdev_set_ap_chanwidth() argument
961 trace_rdev_set_ap_chanwidth(&rdev->wiphy, dev, chandef); rdev_set_ap_chanwidth()
962 ret = rdev->ops->set_ap_chanwidth(&rdev->wiphy, dev, chandef); rdev_set_ap_chanwidth()
963 trace_rdev_return_int(&rdev->wiphy, ret); rdev_set_ap_chanwidth()
969 rdev_add_tx_ts(struct cfg80211_registered_device *rdev, rdev_add_tx_ts() argument
975 trace_rdev_add_tx_ts(&rdev->wiphy, dev, tsid, peer, rdev_add_tx_ts()
977 if (rdev->ops->add_tx_ts) rdev_add_tx_ts()
978 ret = rdev->ops->add_tx_ts(&rdev->wiphy, dev, tsid, peer, rdev_add_tx_ts()
980 trace_rdev_return_int(&rdev->wiphy, ret); rdev_add_tx_ts()
986 rdev_del_tx_ts(struct cfg80211_registered_device *rdev, rdev_del_tx_ts() argument
991 trace_rdev_del_tx_ts(&rdev->wiphy, dev, tsid, peer); rdev_del_tx_ts()
992 if (rdev->ops->del_tx_ts) rdev_del_tx_ts()
993 ret = rdev->ops->del_tx_ts(&rdev->wiphy, dev, tsid, peer); rdev_del_tx_ts()
994 trace_rdev_return_int(&rdev->wiphy, ret); rdev_del_tx_ts()
1000 rdev_tdls_channel_switch(struct cfg80211_registered_device *rdev, rdev_tdls_channel_switch() argument
1006 trace_rdev_tdls_channel_switch(&rdev->wiphy, dev, addr, oper_class, rdev_tdls_channel_switch()
1008 ret = rdev->ops->tdls_channel_switch(&rdev->wiphy, dev, addr, rdev_tdls_channel_switch()
1010 trace_rdev_return_int(&rdev->wiphy, ret); rdev_tdls_channel_switch()
1015 rdev_tdls_cancel_channel_switch(struct cfg80211_registered_device *rdev, rdev_tdls_cancel_channel_switch() argument
1018 trace_rdev_tdls_cancel_channel_switch(&rdev->wiphy, dev, addr); rdev_tdls_cancel_channel_switch()
1019 rdev->ops->tdls_cancel_channel_switch(&rdev->wiphy, dev, addr); rdev_tdls_cancel_channel_switch()
1020 trace_rdev_return_void(&rdev->wiphy); rdev_tdls_cancel_channel_switch()
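
Every rdev_*() helper in this header is the same three-step sandwich: emit an entry tracepoint, dispatch through the driver's ops vtable, then trace the return value so tracing tools see both edges of every driver call. A compact stand-alone model, with printf standing in for the tracepoints and the structs trimmed to essentials:

#include <stdio.h>

struct wiphy { const char *name; };

struct cfg_ops {
        int (*suspend)(struct wiphy *wiphy);
};

struct registered_device {
        struct wiphy wiphy;
        const struct cfg_ops *ops;
};

static void trace_enter(struct wiphy *wiphy, const char *op)
{
        printf("%s: -> %s\n", wiphy->name, op);
}

static void trace_return_int(struct wiphy *wiphy, int ret)
{
        printf("%s: <- %d\n", wiphy->name, ret);
}

static inline int rdev_suspend(struct registered_device *rdev)
{
        int ret;

        trace_enter(&rdev->wiphy, "suspend");
        ret = rdev->ops->suspend(&rdev->wiphy);
        trace_return_int(&rdev->wiphy, ret);
        return ret;
}

static int demo_suspend(struct wiphy *wiphy)
{
        (void)wiphy;
        return 0;
}

int main(void)
{
        static const struct cfg_ops ops = { .suspend = demo_suspend };
        struct registered_device rdev = { { "phy0" }, &ops };

        return rdev_suspend(&rdev);
}

Note the exceptions visible above: rdev_set_qos_map(), rdev_add_tx_ts() and rdev_del_tx_ts() additionally guard on the op pointer before calling it, while most other wrappers assume the caller already verified the op exists.
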
H A Ddebugfs.h5 void cfg80211_debugfs_rdev_add(struct cfg80211_registered_device *rdev);
8 void cfg80211_debugfs_rdev_add(struct cfg80211_registered_device *rdev) {} argument
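
debugfs.h above shows the usual Kconfig-switch idiom: a real prototype when debugfs support is compiled in, and an empty static inline stub otherwise, so call sites never need an #ifdef. Schematically (the guard macro and names here are invented; only the shape is taken from the header):

struct device_state;                    /* opaque to callers */

#ifdef ENABLE_DEBUG_HOOKS               /* stand-in for the real Kconfig gate */
void debug_hook_add(struct device_state *st);
#else
static inline void debug_hook_add(struct device_state *st) { (void)st; }
#endif

int main(void)
{
        debug_hook_add((struct device_state *)0);       /* no-op stub path */
        return 0;
}
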
H A Dap.c6 #include "rdev-ops.h"
9 int __cfg80211_stop_ap(struct cfg80211_registered_device *rdev, __cfg80211_stop_ap() argument
17 if (!rdev->ops->stop_ap) __cfg80211_stop_ap()
27 err = rdev_stop_ap(rdev, dev); __cfg80211_stop_ap()
32 rdev_set_qos_map(rdev, dev, NULL); __cfg80211_stop_ap()
40 int cfg80211_stop_ap(struct cfg80211_registered_device *rdev, cfg80211_stop_ap() argument
47 err = __cfg80211_stop_ap(rdev, dev, notify); cfg80211_stop_ap()
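
__cfg80211_stop_ap() above refuses early when the driver lacks the op and clears the QoS map only once the stop actually succeeded; the listing elides the early-return body, so the error code below is an assumption. A condensed model:

#include <errno.h>

struct dev;                             /* opaque device handle */

struct ap_ops {
        int (*stop_ap)(struct dev *dev);
};

static void noop_clear_qos_map(struct dev *dev)
{
        (void)dev;
}

static int stop_ap(const struct ap_ops *ops, struct dev *dev,
                   void (*clear_qos_map)(struct dev *dev))
{
        int err;

        if (!ops->stop_ap)
                return -EOPNOTSUPP;     /* assumed: driver lacks AP mode */

        err = ops->stop_ap(dev);
        if (!err)
                clear_qos_map(dev);     /* drop per-BSS state only on success */
        return err;
}

int main(void)
{
        struct ap_ops ops = { 0 };      /* no stop_ap: expect the guard to trip */

        return stop_ap(&ops, (struct dev *)0,
                       noop_clear_qos_map) == -EOPNOTSUPP ? 0 : 1;
}
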
H A Dcore.c29 #include "rdev-ops.h"
56 struct cfg80211_registered_device *result = NULL, *rdev; cfg80211_rdev_by_wiphy_idx() local
60 list_for_each_entry(rdev, &cfg80211_rdev_list, list) { cfg80211_rdev_by_wiphy_idx()
61 if (rdev->wiphy_idx == wiphy_idx) { cfg80211_rdev_by_wiphy_idx()
62 result = rdev; cfg80211_rdev_by_wiphy_idx()
72 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); get_wiphy_idx() local
74 return rdev->wiphy_idx; get_wiphy_idx()
79 struct cfg80211_registered_device *rdev; wiphy_idx_to_wiphy() local
83 rdev = cfg80211_rdev_by_wiphy_idx(wiphy_idx); wiphy_idx_to_wiphy()
84 if (!rdev) wiphy_idx_to_wiphy()
86 return &rdev->wiphy; wiphy_idx_to_wiphy()
89 static int cfg80211_dev_check_name(struct cfg80211_registered_device *rdev, cfg80211_dev_check_name() argument
99 if (taken == strlen(newname) && wiphy_idx != rdev->wiphy_idx) { cfg80211_dev_check_name()
120 int cfg80211_dev_rename(struct cfg80211_registered_device *rdev, cfg80211_dev_rename() argument
128 if (strcmp(newname, wiphy_name(&rdev->wiphy)) == 0) cfg80211_dev_rename()
131 result = cfg80211_dev_check_name(rdev, newname); cfg80211_dev_rename()
135 result = device_rename(&rdev->wiphy.dev, newname); cfg80211_dev_rename()
139 if (rdev->wiphy.debugfsdir && cfg80211_dev_rename()
140 !debugfs_rename(rdev->wiphy.debugfsdir->d_parent, cfg80211_dev_rename()
141 rdev->wiphy.debugfsdir, cfg80211_dev_rename()
142 rdev->wiphy.debugfsdir->d_parent, cfg80211_dev_rename()
146 nl80211_notify_wiphy(rdev, NL80211_CMD_NEW_WIPHY); cfg80211_dev_rename()
151 int cfg80211_switch_netns(struct cfg80211_registered_device *rdev, cfg80211_switch_netns() argument
157 if (!(rdev->wiphy.flags & WIPHY_FLAG_NETNS_OK)) cfg80211_switch_netns()
160 list_for_each_entry(wdev, &rdev->wdev_list, list) { cfg80211_switch_netns()
172 net = wiphy_net(&rdev->wiphy); cfg80211_switch_netns()
174 list_for_each_entry_continue_reverse(wdev, &rdev->wdev_list, cfg80211_switch_netns()
188 wiphy_net_set(&rdev->wiphy, net); cfg80211_switch_netns()
190 err = device_rename(&rdev->wiphy.dev, dev_name(&rdev->wiphy.dev)); cfg80211_switch_netns()
198 struct cfg80211_registered_device *rdev = data; cfg80211_rfkill_poll() local
200 rdev_rfkill_poll(rdev); cfg80211_rfkill_poll()
203 void cfg80211_stop_p2p_device(struct cfg80211_registered_device *rdev, cfg80211_stop_p2p_device() argument
214 rdev_stop_p2p_device(rdev, wdev); cfg80211_stop_p2p_device()
217 rdev->opencount--; cfg80211_stop_p2p_device()
219 if (rdev->scan_req && rdev->scan_req->wdev == wdev) { cfg80211_stop_p2p_device()
220 if (WARN_ON(!rdev->scan_req->notified)) cfg80211_stop_p2p_device()
221 rdev->scan_req->aborted = true; cfg80211_stop_p2p_device()
222 ___cfg80211_scan_done(rdev, false); cfg80211_stop_p2p_device()
228 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_shutdown_all_interfaces() local
233 list_for_each_entry(wdev, &rdev->wdev_list, list) { cfg80211_shutdown_all_interfaces()
241 cfg80211_stop_p2p_device(rdev, wdev); cfg80211_shutdown_all_interfaces()
252 struct cfg80211_registered_device *rdev = data; cfg80211_rfkill_set_block() local
258 cfg80211_shutdown_all_interfaces(&rdev->wiphy); cfg80211_rfkill_set_block()
266 struct cfg80211_registered_device *rdev; cfg80211_rfkill_sync_work() local
268 rdev = container_of(work, struct cfg80211_registered_device, rfkill_sync); cfg80211_rfkill_sync_work()
269 cfg80211_rfkill_set_block(rdev, rfkill_blocked(rdev->rfkill)); cfg80211_rfkill_sync_work()
274 struct cfg80211_registered_device *rdev; cfg80211_event_work() local
276 rdev = container_of(work, struct cfg80211_registered_device, cfg80211_event_work()
280 cfg80211_process_rdev_events(rdev); cfg80211_event_work()
284 void cfg80211_destroy_ifaces(struct cfg80211_registered_device *rdev) cfg80211_destroy_ifaces() argument
290 spin_lock_irq(&rdev->destroy_list_lock); cfg80211_destroy_ifaces()
291 while ((item = list_first_entry_or_null(&rdev->destroy_list, cfg80211_destroy_ifaces()
299 spin_unlock_irq(&rdev->destroy_list_lock); cfg80211_destroy_ifaces()
301 list_for_each_entry_safe(wdev, tmp, &rdev->wdev_list, list) { cfg80211_destroy_ifaces()
303 rdev_del_virtual_intf(rdev, wdev); cfg80211_destroy_ifaces()
306 spin_lock_irq(&rdev->destroy_list_lock); cfg80211_destroy_ifaces()
308 spin_unlock_irq(&rdev->destroy_list_lock); cfg80211_destroy_ifaces()
313 struct cfg80211_registered_device *rdev; cfg80211_destroy_iface_wk() local
315 rdev = container_of(work, struct cfg80211_registered_device, cfg80211_destroy_iface_wk()
319 cfg80211_destroy_ifaces(rdev); cfg80211_destroy_iface_wk()
325 struct cfg80211_registered_device *rdev; cfg80211_sched_scan_stop_wk() local
327 rdev = container_of(work, struct cfg80211_registered_device, cfg80211_sched_scan_stop_wk()
332 __cfg80211_stop_sched_scan(rdev, false); cfg80211_sched_scan_stop_wk()
344 struct cfg80211_registered_device *rdev; wiphy_new_nm() local
356 alloc_size = sizeof(*rdev) + sizeof_priv; wiphy_new_nm()
358 rdev = kzalloc(alloc_size, GFP_KERNEL); wiphy_new_nm()
359 if (!rdev) wiphy_new_nm()
362 rdev->ops = ops; wiphy_new_nm()
364 rdev->wiphy_idx = atomic_inc_return(&wiphy_counter); wiphy_new_nm()
366 if (unlikely(rdev->wiphy_idx < 0)) { wiphy_new_nm()
369 kfree(rdev); wiphy_new_nm()
374 rdev->wiphy_idx--; wiphy_new_nm()
381 rv = cfg80211_dev_check_name(rdev, requested_name); wiphy_new_nm()
388 rv = dev_set_name(&rdev->wiphy.dev, "%s", requested_name); wiphy_new_nm()
400 dev_set_name(&rdev->wiphy.dev, PHY_NAME "%d", rdev->wiphy_idx); wiphy_new_nm()
403 INIT_LIST_HEAD(&rdev->wdev_list); wiphy_new_nm()
404 INIT_LIST_HEAD(&rdev->beacon_registrations); wiphy_new_nm()
405 spin_lock_init(&rdev->beacon_registrations_lock); wiphy_new_nm()
406 spin_lock_init(&rdev->bss_lock); wiphy_new_nm()
407 INIT_LIST_HEAD(&rdev->bss_list); wiphy_new_nm()
408 INIT_WORK(&rdev->scan_done_wk, __cfg80211_scan_done); wiphy_new_nm()
409 INIT_WORK(&rdev->sched_scan_results_wk, __cfg80211_sched_scan_results); wiphy_new_nm()
410 INIT_LIST_HEAD(&rdev->mlme_unreg); wiphy_new_nm()
411 spin_lock_init(&rdev->mlme_unreg_lock); wiphy_new_nm()
412 INIT_WORK(&rdev->mlme_unreg_wk, cfg80211_mlme_unreg_wk); wiphy_new_nm()
413 INIT_DELAYED_WORK(&rdev->dfs_update_channels_wk, wiphy_new_nm()
416 rdev->wiphy.wext = &cfg80211_wext_handler; wiphy_new_nm()
419 device_initialize(&rdev->wiphy.dev); wiphy_new_nm()
420 rdev->wiphy.dev.class = &ieee80211_class; wiphy_new_nm()
421 rdev->wiphy.dev.platform_data = rdev; wiphy_new_nm()
422 device_enable_async_suspend(&rdev->wiphy.dev); wiphy_new_nm()
424 INIT_LIST_HEAD(&rdev->destroy_list); wiphy_new_nm()
425 spin_lock_init(&rdev->destroy_list_lock); wiphy_new_nm()
426 INIT_WORK(&rdev->destroy_work, cfg80211_destroy_iface_wk); wiphy_new_nm()
427 INIT_WORK(&rdev->sched_scan_stop_wk, cfg80211_sched_scan_stop_wk); wiphy_new_nm()
430 rdev->wiphy.flags |= WIPHY_FLAG_PS_ON_BY_DEFAULT; wiphy_new_nm()
433 wiphy_net_set(&rdev->wiphy, &init_net); wiphy_new_nm()
435 rdev->rfkill_ops.set_block = cfg80211_rfkill_set_block; wiphy_new_nm()
436 rdev->rfkill = rfkill_alloc(dev_name(&rdev->wiphy.dev), wiphy_new_nm()
437 &rdev->wiphy.dev, RFKILL_TYPE_WLAN, wiphy_new_nm()
438 &rdev->rfkill_ops, rdev); wiphy_new_nm()
440 if (!rdev->rfkill) { wiphy_new_nm()
441 kfree(rdev); wiphy_new_nm()
445 INIT_WORK(&rdev->rfkill_sync, cfg80211_rfkill_sync_work); wiphy_new_nm()
446 INIT_WORK(&rdev->conn_work, cfg80211_conn_work); wiphy_new_nm()
447 INIT_WORK(&rdev->event_work, cfg80211_event_work); wiphy_new_nm()
449 init_waitqueue_head(&rdev->dev_wait); wiphy_new_nm()
456 rdev->wiphy.retry_short = 7; wiphy_new_nm()
457 rdev->wiphy.retry_long = 4; wiphy_new_nm()
458 rdev->wiphy.frag_threshold = (u32) -1; wiphy_new_nm()
459 rdev->wiphy.rts_threshold = (u32) -1; wiphy_new_nm()
460 rdev->wiphy.coverage_class = 0; wiphy_new_nm()
462 rdev->wiphy.max_num_csa_counters = 1; wiphy_new_nm()
464 rdev->wiphy.max_sched_scan_plans = 1; wiphy_new_nm()
465 rdev->wiphy.max_sched_scan_plan_interval = U32_MAX; wiphy_new_nm()
467 return &rdev->wiphy; wiphy_new_nm()
548 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); wiphy_register() local
567 (!rdev->ops->tdls_channel_switch || wiphy_register()
568 !rdev->ops->tdls_cancel_channel_switch))) wiphy_register()
616 !rdev->ops->set_mac_acl))) wiphy_register()
687 if (WARN_ON(rdev->wiphy.wowlan && rdev->wiphy.wowlan->n_patterns && wiphy_register()
688 (!rdev->wiphy.wowlan->pattern_min_len || wiphy_register()
689 rdev->wiphy.wowlan->pattern_min_len > wiphy_register()
690 rdev->wiphy.wowlan->pattern_max_len))) wiphy_register()
697 rdev->wiphy.features |= NL80211_FEATURE_SCAN_FLUSH; wiphy_register()
700 res = device_add(&rdev->wiphy.dev); wiphy_register()
709 list_add_rcu(&rdev->list, &cfg80211_rdev_list); wiphy_register()
713 rdev->wiphy.debugfsdir = wiphy_register()
714 debugfs_create_dir(wiphy_name(&rdev->wiphy), wiphy_register()
716 if (IS_ERR(rdev->wiphy.debugfsdir)) wiphy_register()
717 rdev->wiphy.debugfsdir = NULL; wiphy_register()
719 cfg80211_debugfs_rdev_add(rdev); wiphy_register()
720 nl80211_notify_wiphy(rdev, NL80211_CMD_NEW_WIPHY); wiphy_register()
733 rdev->wiphy.registered = true; wiphy_register()
736 res = rfkill_register(rdev->rfkill); wiphy_register()
738 rfkill_destroy(rdev->rfkill); wiphy_register()
739 rdev->rfkill = NULL; wiphy_register()
740 wiphy_unregister(&rdev->wiphy); wiphy_register()
750 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); wiphy_rfkill_start_polling() local
752 if (!rdev->ops->rfkill_poll) wiphy_rfkill_start_polling()
754 rdev->rfkill_ops.poll = cfg80211_rfkill_poll; wiphy_rfkill_start_polling()
755 rfkill_resume_polling(rdev->rfkill); wiphy_rfkill_start_polling()
761 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); wiphy_rfkill_stop_polling() local
763 rfkill_pause_polling(rdev->rfkill); wiphy_rfkill_stop_polling()
769 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); wiphy_unregister() local
771 wait_event(rdev->dev_wait, ({ wiphy_unregister()
774 __count = rdev->opencount; wiphy_unregister()
778 if (rdev->rfkill) wiphy_unregister()
779 rfkill_unregister(rdev->rfkill); wiphy_unregister()
782 nl80211_notify_wiphy(rdev, NL80211_CMD_DEL_WIPHY); wiphy_unregister()
783 rdev->wiphy.registered = false; wiphy_unregister()
785 WARN_ON(!list_empty(&rdev->wdev_list)); wiphy_unregister()
791 debugfs_remove_recursive(rdev->wiphy.debugfsdir); wiphy_unregister()
792 list_del_rcu(&rdev->list); wiphy_unregister()
802 device_del(&rdev->wiphy.dev); wiphy_unregister()
806 flush_work(&rdev->scan_done_wk); wiphy_unregister()
807 cancel_work_sync(&rdev->conn_work); wiphy_unregister()
808 flush_work(&rdev->event_work); wiphy_unregister()
809 cancel_delayed_work_sync(&rdev->dfs_update_channels_wk); wiphy_unregister()
810 flush_work(&rdev->destroy_work); wiphy_unregister()
811 flush_work(&rdev->sched_scan_stop_wk); wiphy_unregister()
812 flush_work(&rdev->mlme_unreg_wk); wiphy_unregister()
815 if (rdev->wiphy.wowlan_config && rdev->ops->set_wakeup) wiphy_unregister()
816 rdev_set_wakeup(rdev, false); wiphy_unregister()
818 cfg80211_rdev_free_wowlan(rdev); wiphy_unregister()
819 cfg80211_rdev_free_coalesce(rdev); wiphy_unregister()
823 void cfg80211_dev_free(struct cfg80211_registered_device *rdev) cfg80211_dev_free() argument
827 rfkill_destroy(rdev->rfkill); cfg80211_dev_free()
828 list_for_each_entry_safe(reg, treg, &rdev->beacon_registrations, list) { cfg80211_dev_free()
832 list_for_each_entry_safe(scan, tmp, &rdev->bss_list, list) cfg80211_dev_free()
833 cfg80211_put_bss(&rdev->wiphy, &scan->pub); cfg80211_dev_free()
834 kfree(rdev); cfg80211_dev_free()
845 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); wiphy_rfkill_set_hw_state() local
847 if (rfkill_set_hw_state(rdev->rfkill, blocked)) wiphy_rfkill_set_hw_state()
848 schedule_work(&rdev->rfkill_sync); wiphy_rfkill_set_hw_state()
854 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_unregister_wdev() local
862 rdev->devlist_generation++; cfg80211_unregister_wdev()
867 cfg80211_stop_p2p_device(rdev, wdev); cfg80211_unregister_wdev()
880 void cfg80211_update_iface_num(struct cfg80211_registered_device *rdev, cfg80211_update_iface_num() argument
885 rdev->num_running_ifaces += num; cfg80211_update_iface_num()
887 rdev->num_running_monitor_ifaces += num; cfg80211_update_iface_num()
890 void __cfg80211_leave(struct cfg80211_registered_device *rdev, __cfg80211_leave() argument
901 __cfg80211_leave_ibss(rdev, dev, true); __cfg80211_leave()
905 sched_scan_req = rtnl_dereference(rdev->sched_scan_req); __cfg80211_leave()
907 __cfg80211_stop_sched_scan(rdev, false); __cfg80211_leave()
915 cfg80211_disconnect(rdev, dev, __cfg80211_leave()
919 __cfg80211_leave_mesh(rdev, dev); __cfg80211_leave()
923 __cfg80211_stop_ap(rdev, dev, true); __cfg80211_leave()
926 __cfg80211_leave_ocb(rdev, dev); __cfg80211_leave()
945 void cfg80211_leave(struct cfg80211_registered_device *rdev, cfg80211_leave() argument
949 __cfg80211_leave(rdev, wdev); cfg80211_leave()
956 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_stop_iface() local
971 queue_work(cfg80211_wq, &rdev->event_work); cfg80211_stop_iface()
980 struct cfg80211_registered_device *rdev; cfg80211_netdev_notifier_call() local
986 rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_netdev_notifier_call()
996 * NB: cannot take rdev->mtx here because this may be cfg80211_netdev_notifier_call()
1006 wdev->identifier = ++rdev->wdev_id; cfg80211_netdev_notifier_call()
1007 list_add_rcu(&wdev->list, &rdev->wdev_list); cfg80211_netdev_notifier_call()
1008 rdev->devlist_generation++; cfg80211_netdev_notifier_call()
1012 if (sysfs_create_link(&dev->dev.kobj, &rdev->wiphy.dev.kobj, cfg80211_netdev_notifier_call()
1036 cfg80211_leave(rdev, wdev); cfg80211_netdev_notifier_call()
1039 cfg80211_update_iface_num(rdev, wdev->iftype, -1); cfg80211_netdev_notifier_call()
1040 if (rdev->scan_req && rdev->scan_req->wdev == wdev) { cfg80211_netdev_notifier_call()
1041 if (WARN_ON(!rdev->scan_req->notified)) cfg80211_netdev_notifier_call()
1042 rdev->scan_req->aborted = true; cfg80211_netdev_notifier_call()
1043 ___cfg80211_scan_done(rdev, false); cfg80211_netdev_notifier_call()
1046 sched_scan_req = rtnl_dereference(rdev->sched_scan_req); cfg80211_netdev_notifier_call()
1049 __cfg80211_stop_sched_scan(rdev, false); cfg80211_netdev_notifier_call()
1052 rdev->opencount--; cfg80211_netdev_notifier_call()
1053 wake_up(&rdev->dev_wait); cfg80211_netdev_notifier_call()
1056 cfg80211_update_iface_num(rdev, wdev->iftype, 1); cfg80211_netdev_notifier_call()
1061 cfg80211_ibss_wext_join(rdev, wdev); cfg80211_netdev_notifier_call()
1064 cfg80211_mgd_wext_connect(rdev, wdev); cfg80211_netdev_notifier_call()
1078 __cfg80211_join_mesh(rdev, dev, cfg80211_netdev_notifier_call()
1088 rdev->opencount++; cfg80211_netdev_notifier_call()
1096 rdev->ops->set_power_mgmt) cfg80211_netdev_notifier_call()
1097 if (rdev_set_power_mgmt(rdev, dev, wdev->ps, cfg80211_netdev_notifier_call()
1114 rdev->devlist_generation++; cfg80211_netdev_notifier_call()
1143 if (rfkill_blocked(rdev->rfkill)) cfg80211_netdev_notifier_call()
1161 struct cfg80211_registered_device *rdev; cfg80211_pernet_exit() local
1164 list_for_each_entry(rdev, &cfg80211_rdev_list, list) { cfg80211_pernet_exit()
1165 if (net_eq(wiphy_net(&rdev->wiphy), net)) cfg80211_pernet_exit()
1166 WARN_ON(cfg80211_switch_netns(rdev, &init_net)); cfg80211_pernet_exit()
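
wiphy_new_nm() above sizes one allocation as sizeof(*rdev) + sizeof_priv, so the registered device and the driver's private area share a single block and the private pointer is simply "just past the header". A user-space rendering of that idiom (alignment is hand-waved here; the kernel version takes care of it):

#include <stdlib.h>

struct rdev {
        int wiphy_idx;
        /* driver-private area follows immediately */
};

static struct rdev *rdev_alloc(size_t sizeof_priv)
{
        /* one calloc covers both the header and the private area */
        return calloc(1, sizeof(struct rdev) + sizeof_priv);
}

static void *rdev_priv(struct rdev *rdev)
{
        return (char *)rdev + sizeof(*rdev);
}

int main(void)
{
        struct rdev *rdev = rdev_alloc(64);
        int *priv;

        if (!rdev)
                return 1;
        priv = rdev_priv(rdev);         /* the driver sees only this region */
        priv[0] = 42;
        free(rdev);
        return 0;
}

The teardown half of core.c is equally disciplined: wiphy_unregister() flushes or cancels every work item before cfg80211_dev_free() runs, so no deferred worker can touch a freed rdev.
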
H A Dsysfs.c19 #include "rdev-ops.h"
78 struct cfg80211_registered_device *rdev = dev_to_rdev(dev); wiphy_dev_release() local
80 cfg80211_dev_free(rdev); wiphy_dev_release()
90 static void cfg80211_leave_all(struct cfg80211_registered_device *rdev) cfg80211_leave_all() argument
94 list_for_each_entry(wdev, &rdev->wdev_list, list) cfg80211_leave_all()
95 cfg80211_leave(rdev, wdev); cfg80211_leave_all()
100 struct cfg80211_registered_device *rdev = dev_to_rdev(dev); wiphy_suspend() local
103 rdev->suspend_at = get_seconds(); wiphy_suspend()
106 if (rdev->wiphy.registered) { wiphy_suspend()
107 if (!rdev->wiphy.wowlan_config) wiphy_suspend()
108 cfg80211_leave_all(rdev); wiphy_suspend()
109 if (rdev->ops->suspend) wiphy_suspend()
110 ret = rdev_suspend(rdev, rdev->wiphy.wowlan_config); wiphy_suspend()
113 cfg80211_leave_all(rdev); wiphy_suspend()
114 ret = rdev_suspend(rdev, NULL); wiphy_suspend()
124 struct cfg80211_registered_device *rdev = dev_to_rdev(dev); wiphy_resume() local
128 cfg80211_bss_age(rdev, get_seconds() - rdev->suspend_at); wiphy_resume()
130 if (rdev->ops->resume) { wiphy_resume()
132 if (rdev->wiphy.registered) wiphy_resume()
133 ret = rdev_resume(rdev); wiphy_resume()
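Note: wiphy_suspend() above records rdev->suspend_at, and wiphy_resume() ages the cached BSS table by the seconds slept via cfg80211_bss_age(). A standalone sketch of that record-then-age pattern; cache_entry/cache_age are hypothetical names, not the kernel helpers:

    /* record a timestamp at suspend, age cached entries at resume */
    #include <stdio.h>
    #include <time.h>

    struct cache_entry { time_t ts; };

    static void cache_age(struct cache_entry *e, time_t delta)
    {
        e->ts -= delta;            /* pretend the entry is 'delta' seconds older */
    }

    int main(void)
    {
        struct cache_entry bss = { .ts = time(NULL) };
        time_t suspend_at = time(NULL);     /* wiphy_suspend(): rdev->suspend_at */

        /* ... system sleeps ... */

        time_t now = suspend_at + 30;       /* simulate 30 s of sleep */
        cache_age(&bss, now - suspend_at);  /* wiphy_resume(): cfg80211_bss_age() */
        printf("entry aged by %ld s\n", (long)(now - suspend_at));
        return 0;
    }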
H A D nl80211.h 8 void nl80211_notify_wiphy(struct cfg80211_registered_device *rdev,
10 void nl80211_send_scan_start(struct cfg80211_registered_device *rdev,
12 struct sk_buff *nl80211_build_scan_msg(struct cfg80211_registered_device *rdev,
14 void nl80211_send_scan_result(struct cfg80211_registered_device *rdev,
16 void nl80211_send_sched_scan(struct cfg80211_registered_device *rdev,
18 void nl80211_send_sched_scan_results(struct cfg80211_registered_device *rdev,
35 void nl80211_send_rx_auth(struct cfg80211_registered_device *rdev,
38 void nl80211_send_rx_assoc(struct cfg80211_registered_device *rdev,
42 void nl80211_send_deauth(struct cfg80211_registered_device *rdev,
45 void nl80211_send_disassoc(struct cfg80211_registered_device *rdev,
48 void nl80211_send_auth_timeout(struct cfg80211_registered_device *rdev,
51 void nl80211_send_assoc_timeout(struct cfg80211_registered_device *rdev,
54 void nl80211_send_connect_result(struct cfg80211_registered_device *rdev,
59 void nl80211_send_roamed(struct cfg80211_registered_device *rdev,
63 void nl80211_send_disconnected(struct cfg80211_registered_device *rdev,
68 nl80211_michael_mic_failure(struct cfg80211_registered_device *rdev,
78 void nl80211_send_ibss_bssid(struct cfg80211_registered_device *rdev,
82 int nl80211_send_mgmt(struct cfg80211_registered_device *rdev,
88 nl80211_radar_notify(struct cfg80211_registered_device *rdev,
95 void cfg80211_rdev_free_coalesce(struct cfg80211_registered_device *rdev);
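Note: the nl80211.h prototypes above all share the (rdev, netdev, payload..., gfp) shape. A hedged sketch of a notifier with the same calling convention; send_event() and the stub types are illustrative stand-ins, not the real nl80211 helpers:

    #include <stddef.h>
    #include <stdio.h>

    /* opaque stand-ins for the kernel types */
    struct rdev;
    struct netdev;
    typedef unsigned gfp_t;
    #define GFP_KERNEL 0u

    /* hypothetical notifier following the same (rdev, dev, payload, gfp) shape */
    static void send_event(struct rdev *rdev, struct netdev *dev,
                           const void *buf, size_t len, gfp_t gfp)
    {
        /* a real helper would allocate a netlink message with 'gfp' and
         * multicast it; here we only show the calling convention */
        printf("event: dev=%p len=%zu gfp=%u\n", (void *)dev, len, gfp);
        (void)rdev; (void)buf;
    }

    int main(void)
    {
        send_event(NULL, NULL, "deauth", 6, GFP_KERNEL);
        return 0;
    }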
H A D ocb.c 18 #include "rdev-ops.h"
20 int __cfg80211_join_ocb(struct cfg80211_registered_device *rdev, __cfg80211_join_ocb() argument
35 err = rdev_join_ocb(rdev, dev, setup); __cfg80211_join_ocb()
42 int cfg80211_join_ocb(struct cfg80211_registered_device *rdev, cfg80211_join_ocb() argument
50 err = __cfg80211_join_ocb(rdev, dev, setup); cfg80211_join_ocb()
56 int __cfg80211_leave_ocb(struct cfg80211_registered_device *rdev, __cfg80211_leave_ocb() argument
67 if (!rdev->ops->leave_ocb) __cfg80211_leave_ocb()
70 err = rdev_leave_ocb(rdev, dev); __cfg80211_leave_ocb()
77 int cfg80211_leave_ocb(struct cfg80211_registered_device *rdev, cfg80211_leave_ocb() argument
84 err = __cfg80211_leave_ocb(rdev, dev); cfg80211_leave_ocb()
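Note: ocb.c above splits each operation into a locked public wrapper (cfg80211_join_ocb) and an __-prefixed worker that checks for the driver op before dispatching. A minimal pthread sketch of that split, assuming hypothetical do_op/__do_op names:

    /* locked wrapper / unlocked worker split (compile with -pthread) */
    #include <pthread.h>
    #include <stdio.h>

    static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
    static int (*driver_op)(void);            /* may be NULL, like rdev->ops->leave_ocb */

    static int __do_op(void)                  /* caller must hold 'lock' */
    {
        if (!driver_op)
            return -1;                        /* -EOPNOTSUPP in the kernel */
        return driver_op();
    }

    static int do_op(void)                    /* public entry, takes the lock */
    {
        pthread_mutex_lock(&lock);
        int err = __do_op();
        pthread_mutex_unlock(&lock);
        return err;
    }

    int main(void) { printf("%d\n", do_op()); return 0; }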
H A D core.h 113 cfg80211_rdev_free_wowlan(struct cfg80211_registered_device *rdev) cfg80211_rdev_free_wowlan() argument
118 if (!rdev->wiphy.wowlan_config) cfg80211_rdev_free_wowlan()
120 for (i = 0; i < rdev->wiphy.wowlan_config->n_patterns; i++) cfg80211_rdev_free_wowlan()
121 kfree(rdev->wiphy.wowlan_config->patterns[i].mask); cfg80211_rdev_free_wowlan()
122 kfree(rdev->wiphy.wowlan_config->patterns); cfg80211_rdev_free_wowlan()
123 if (rdev->wiphy.wowlan_config->tcp && cfg80211_rdev_free_wowlan()
124 rdev->wiphy.wowlan_config->tcp->sock) cfg80211_rdev_free_wowlan()
125 sock_release(rdev->wiphy.wowlan_config->tcp->sock); cfg80211_rdev_free_wowlan()
126 kfree(rdev->wiphy.wowlan_config->tcp); cfg80211_rdev_free_wowlan()
127 kfree(rdev->wiphy.wowlan_config->nd_config); cfg80211_rdev_free_wowlan()
128 kfree(rdev->wiphy.wowlan_config); cfg80211_rdev_free_wowlan()
171 int cfg80211_switch_netns(struct cfg80211_registered_device *rdev,
190 static inline bool cfg80211_has_monitors_only(struct cfg80211_registered_device *rdev) cfg80211_has_monitors_only() argument
194 return rdev->num_running_ifaces == rdev->num_running_monitor_ifaces && cfg80211_has_monitors_only()
195 rdev->num_running_ifaces > 0; cfg80211_has_monitors_only()
261 void cfg80211_destroy_ifaces(struct cfg80211_registered_device *rdev);
264 void cfg80211_dev_free(struct cfg80211_registered_device *rdev);
266 int cfg80211_dev_rename(struct cfg80211_registered_device *rdev,
271 void cfg80211_bss_expire(struct cfg80211_registered_device *rdev);
272 void cfg80211_bss_age(struct cfg80211_registered_device *rdev,
276 int cfg80211_join_ibss(struct cfg80211_registered_device *rdev,
281 int __cfg80211_leave_ibss(struct cfg80211_registered_device *rdev,
283 int cfg80211_leave_ibss(struct cfg80211_registered_device *rdev,
287 int cfg80211_ibss_wext_join(struct cfg80211_registered_device *rdev,
293 int __cfg80211_join_mesh(struct cfg80211_registered_device *rdev,
297 int cfg80211_join_mesh(struct cfg80211_registered_device *rdev,
301 int __cfg80211_leave_mesh(struct cfg80211_registered_device *rdev,
303 int cfg80211_leave_mesh(struct cfg80211_registered_device *rdev,
305 int cfg80211_set_mesh_channel(struct cfg80211_registered_device *rdev,
310 int __cfg80211_join_ocb(struct cfg80211_registered_device *rdev,
313 int cfg80211_join_ocb(struct cfg80211_registered_device *rdev,
316 int __cfg80211_leave_ocb(struct cfg80211_registered_device *rdev,
318 int cfg80211_leave_ocb(struct cfg80211_registered_device *rdev,
322 int __cfg80211_stop_ap(struct cfg80211_registered_device *rdev,
324 int cfg80211_stop_ap(struct cfg80211_registered_device *rdev,
328 int cfg80211_mlme_auth(struct cfg80211_registered_device *rdev,
337 int cfg80211_mlme_assoc(struct cfg80211_registered_device *rdev,
343 int cfg80211_mlme_deauth(struct cfg80211_registered_device *rdev,
347 int cfg80211_mlme_disassoc(struct cfg80211_registered_device *rdev,
351 void cfg80211_mlme_down(struct cfg80211_registered_device *rdev,
359 int cfg80211_mlme_mgmt_tx(struct cfg80211_registered_device *rdev,
369 int cfg80211_connect(struct cfg80211_registered_device *rdev,
381 int cfg80211_disconnect(struct cfg80211_registered_device *rdev,
388 int cfg80211_mgd_wext_connect(struct cfg80211_registered_device *rdev,
403 int cfg80211_validate_key_settings(struct cfg80211_registered_device *rdev,
407 void ___cfg80211_scan_done(struct cfg80211_registered_device *rdev,
410 int __cfg80211_stop_sched_scan(struct cfg80211_registered_device *rdev,
413 int cfg80211_change_iface(struct cfg80211_registered_device *rdev,
416 void cfg80211_process_rdev_events(struct cfg80211_registered_device *rdev);
419 int cfg80211_can_use_iftype_chan(struct cfg80211_registered_device *rdev,
465 int cfg80211_set_monitor_channel(struct cfg80211_registered_device *rdev,
472 int cfg80211_validate_beacon_int(struct cfg80211_registered_device *rdev,
475 void cfg80211_update_iface_num(struct cfg80211_registered_device *rdev,
478 void __cfg80211_leave(struct cfg80211_registered_device *rdev,
480 void cfg80211_leave(struct cfg80211_registered_device *rdev,
483 void cfg80211_stop_p2p_device(struct cfg80211_registered_device *rdev,
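Note: cfg80211_rdev_free_wowlan() above tears down a nested config inner-first: each per-pattern mask, then the pattern array, then the optional sub-objects, then the config itself. A standalone sketch of that teardown order with hypothetical types:

    #include <stdlib.h>

    struct pattern { unsigned char *mask; };
    struct wowlan_config {
        struct pattern *patterns;
        int n_patterns;
        void *tcp;                 /* optional sub-config */
    };

    static void free_wowlan(struct wowlan_config *cfg)
    {
        if (!cfg)
            return;                /* mirrors the !wowlan_config early return */
        for (int i = 0; i < cfg->n_patterns; i++)
            free(cfg->patterns[i].mask);   /* inner allocations first */
        free(cfg->patterns);
        free(cfg->tcp);
        free(cfg);
    }

    int main(void)
    {
        struct wowlan_config *cfg = calloc(1, sizeof(*cfg));
        free_wowlan(cfg);          /* safe on empty and NULL configs alike */
        return 0;
    }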
H A D mesh.c 6 #include "rdev-ops.h"
96 int __cfg80211_join_mesh(struct cfg80211_registered_device *rdev, __cfg80211_join_mesh() argument
111 if (!(rdev->wiphy.flags & WIPHY_FLAG_MESH_AUTH) && __cfg80211_join_mesh()
121 if (!rdev->ops->join_mesh) __cfg80211_join_mesh()
138 sband = rdev->wiphy.bands[band]; __cfg80211_join_mesh()
171 rdev->wiphy.bands[setup->chandef.chan->band]; __cfg80211_join_mesh()
177 if (!cfg80211_reg_can_beacon(&rdev->wiphy, &setup->chandef, __cfg80211_join_mesh()
181 err = rdev_join_mesh(rdev, dev, conf, setup); __cfg80211_join_mesh()
191 int cfg80211_join_mesh(struct cfg80211_registered_device *rdev, cfg80211_join_mesh() argument
200 err = __cfg80211_join_mesh(rdev, dev, setup, conf); cfg80211_join_mesh()
206 int cfg80211_set_mesh_channel(struct cfg80211_registered_device *rdev, cfg80211_set_mesh_channel() argument
219 if (rdev->ops->libertas_set_mesh_channel) { cfg80211_set_mesh_channel()
226 err = rdev_libertas_set_mesh_channel(rdev, wdev->netdev, cfg80211_set_mesh_channel()
241 int __cfg80211_leave_mesh(struct cfg80211_registered_device *rdev, __cfg80211_leave_mesh() argument
252 if (!rdev->ops->leave_mesh) __cfg80211_leave_mesh()
258 err = rdev_leave_mesh(rdev, dev); __cfg80211_leave_mesh()
262 rdev_set_qos_map(rdev, dev, NULL); __cfg80211_leave_mesh()
268 int cfg80211_leave_mesh(struct cfg80211_registered_device *rdev, cfg80211_leave_mesh() argument
275 err = __cfg80211_leave_mesh(rdev, dev); cfg80211_leave_mesh()
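Note: __cfg80211_join_mesh() above chains capability checks (WIPHY_FLAG_MESH_AUTH, rdev->ops->join_mesh, regulatory) before calling into the driver. A compact sketch of that validate-then-dispatch chain; the flag, ops and dev types here are stand-ins:

    #include <stdio.h>
    #include <errno.h>

    #define FLAG_MESH_AUTH 0x1

    struct ops { int (*join_mesh)(void); };
    struct dev { unsigned flags; struct ops *ops; };

    static int join_mesh(struct dev *d, int wants_auth)
    {
        if (wants_auth && !(d->flags & FLAG_MESH_AUTH))
            return -EINVAL;        /* feature not advertised */
        if (!d->ops->join_mesh)
            return -EOPNOTSUPP;    /* driver lacks the op */
        return d->ops->join_mesh();
    }

    static int fake_join(void) { return 0; }

    int main(void)
    {
        struct ops ops = { .join_mesh = fake_join };
        struct dev d = { .flags = FLAG_MESH_AUTH, .ops = &ops };
        printf("join: %d\n", join_mesh(&d, 1));
        return 0;
    }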
H A D mlme.c 19 #include "rdev-ops.h"
27 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_rx_assoc_resp() local
47 nl80211_send_rx_assoc(rdev, dev, buf, len, GFP_KERNEL, uapsd_queues); cfg80211_rx_assoc_resp()
58 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_process_auth() local
60 nl80211_send_rx_auth(rdev, wdev->netdev, buf, len, GFP_KERNEL); cfg80211_process_auth()
67 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_process_deauth() local
73 nl80211_send_deauth(rdev, wdev->netdev, buf, len, GFP_KERNEL); cfg80211_process_deauth()
86 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_process_disassoc() local
92 nl80211_send_disassoc(rdev, wdev->netdev, buf, len, GFP_KERNEL); cfg80211_process_disassoc()
127 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_auth_timeout() local
131 nl80211_send_auth_timeout(rdev, dev, addr, GFP_KERNEL); cfg80211_auth_timeout()
140 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_assoc_timeout() local
144 nl80211_send_assoc_timeout(rdev, dev, bss->bssid, GFP_KERNEL); cfg80211_assoc_timeout()
176 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_michael_mic_failure() local
194 nl80211_michael_mic_failure(rdev, dev, addr, key_type, key_id, tsc, gfp); cfg80211_michael_mic_failure()
199 int cfg80211_mlme_auth(struct cfg80211_registered_device *rdev, cfg80211_mlme_auth() argument
232 req.bss = cfg80211_get_bss(&rdev->wiphy, chan, bssid, ssid, ssid_len, cfg80211_mlme_auth()
238 err = rdev_auth(rdev, dev, &req); cfg80211_mlme_auth()
240 cfg80211_put_bss(&rdev->wiphy, req.bss); cfg80211_mlme_auth()
278 int cfg80211_mlme_assoc(struct cfg80211_registered_device *rdev, cfg80211_mlme_assoc() argument
296 rdev->wiphy.ht_capa_mod_mask); cfg80211_mlme_assoc()
298 rdev->wiphy.vht_capa_mod_mask); cfg80211_mlme_assoc()
300 req->bss = cfg80211_get_bss(&rdev->wiphy, chan, bssid, ssid, ssid_len, cfg80211_mlme_assoc()
306 err = rdev_assoc(rdev, dev, req); cfg80211_mlme_assoc()
310 cfg80211_put_bss(&rdev->wiphy, req->bss); cfg80211_mlme_assoc()
315 int cfg80211_mlme_deauth(struct cfg80211_registered_device *rdev, cfg80211_mlme_deauth() argument
336 return rdev_deauth(rdev, dev, &req); cfg80211_mlme_deauth()
339 int cfg80211_mlme_disassoc(struct cfg80211_registered_device *rdev, cfg80211_mlme_disassoc() argument
363 err = rdev_disassoc(rdev, dev, &req); cfg80211_mlme_disassoc()
372 void cfg80211_mlme_down(struct cfg80211_registered_device *rdev, cfg80211_mlme_down() argument
380 if (!rdev->ops->deauth) cfg80211_mlme_down()
387 cfg80211_mlme_deauth(rdev, dev, bssid, NULL, 0, cfg80211_mlme_down()
405 cfg80211_process_mlme_unregistrations(struct cfg80211_registered_device *rdev) cfg80211_process_mlme_unregistrations() argument
411 spin_lock_bh(&rdev->mlme_unreg_lock); cfg80211_process_mlme_unregistrations()
412 while ((reg = list_first_entry_or_null(&rdev->mlme_unreg, cfg80211_process_mlme_unregistrations()
416 spin_unlock_bh(&rdev->mlme_unreg_lock); cfg80211_process_mlme_unregistrations()
418 if (rdev->ops->mgmt_frame_register) { cfg80211_process_mlme_unregistrations()
421 rdev_mgmt_frame_register(rdev, reg->wdev, cfg80211_process_mlme_unregistrations()
427 spin_lock_bh(&rdev->mlme_unreg_lock); cfg80211_process_mlme_unregistrations()
429 spin_unlock_bh(&rdev->mlme_unreg_lock); cfg80211_process_mlme_unregistrations()
434 struct cfg80211_registered_device *rdev; cfg80211_mlme_unreg_wk() local
436 rdev = container_of(wk, struct cfg80211_registered_device, cfg80211_mlme_unreg_wk()
440 cfg80211_process_mlme_unregistrations(rdev); cfg80211_mlme_unreg_wk()
449 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_mlme_register_mgmt() local
499 cfg80211_process_mlme_unregistrations(rdev); cfg80211_mlme_register_mgmt()
501 if (rdev->ops->mgmt_frame_register) cfg80211_mlme_register_mgmt()
502 rdev_mgmt_frame_register(rdev, wdev, frame_type, true); cfg80211_mlme_register_mgmt()
515 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_mlme_unregister_socket() local
525 spin_lock(&rdev->mlme_unreg_lock); cfg80211_mlme_unregister_socket()
526 list_add_tail(&reg->list, &rdev->mlme_unreg); cfg80211_mlme_unregister_socket()
527 spin_unlock(&rdev->mlme_unreg_lock); cfg80211_mlme_unregister_socket()
529 schedule_work(&rdev->mlme_unreg_wk); cfg80211_mlme_unregister_socket()
534 if (nlportid && rdev->crit_proto_nlportid == nlportid) { cfg80211_mlme_unregister_socket()
535 rdev->crit_proto_nlportid = 0; cfg80211_mlme_unregister_socket()
536 rdev_crit_proto_stop(rdev, wdev); cfg80211_mlme_unregister_socket()
545 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_mlme_purge_registrations() local
548 spin_lock(&rdev->mlme_unreg_lock); cfg80211_mlme_purge_registrations()
549 list_splice_tail_init(&wdev->mgmt_registrations, &rdev->mlme_unreg); cfg80211_mlme_purge_registrations()
550 spin_unlock(&rdev->mlme_unreg_lock); cfg80211_mlme_purge_registrations()
553 cfg80211_process_mlme_unregistrations(rdev); cfg80211_mlme_purge_registrations()
556 int cfg80211_mlme_mgmt_tx(struct cfg80211_registered_device *rdev, cfg80211_mlme_mgmt_tx() argument
566 if (!rdev->ops->mgmt_tx) cfg80211_mlme_mgmt_tx()
651 return rdev_mgmt_tx(rdev, wdev, params, cookie); cfg80211_mlme_mgmt_tx()
658 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_rx_mgmt() local
696 if (nl80211_send_mgmt(rdev, wdev, reg->nlportid, cfg80211_rx_mgmt()
715 struct cfg80211_registered_device *rdev; cfg80211_dfs_channels_update_work() local
725 rdev = container_of(delayed_work, struct cfg80211_registered_device, cfg80211_dfs_channels_update_work()
727 wiphy = &rdev->wiphy; cfg80211_dfs_channels_update_work()
751 nl80211_radar_notify(rdev, &chandef, cfg80211_dfs_channels_update_work()
768 queue_delayed_work(cfg80211_wq, &rdev->dfs_update_channels_wk, cfg80211_dfs_channels_update_work()
777 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_radar_event() local
789 queue_delayed_work(cfg80211_wq, &rdev->dfs_update_channels_wk, cfg80211_radar_event()
792 nl80211_radar_notify(rdev, chandef, NL80211_RADAR_DETECTED, NULL, gfp); cfg80211_radar_event()
802 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_cac_event() local
828 nl80211_radar_notify(rdev, chandef, event, netdev, gfp); cfg80211_cac_event()
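Note: mlme.c above defers frame-registration teardown onto rdev->mlme_unreg under a spinlock and drains it from a work item, dropping the lock around each driver notification. A user-space sketch of that drain loop (compile with -pthread); all names are hypothetical:

    #include <pthread.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct reg { struct reg *next; int id; };

    static pthread_mutex_t unreg_lock = PTHREAD_MUTEX_INITIALIZER;
    static struct reg *unreg_list;

    static void queue_unreg(int id)
    {
        struct reg *r = malloc(sizeof(*r));
        r->id = id;
        pthread_mutex_lock(&unreg_lock);
        r->next = unreg_list;              /* add under the lock, like mlme_unreg_lock */
        unreg_list = r;
        pthread_mutex_unlock(&unreg_lock);
    }

    static void process_unregistrations(void)
    {
        for (;;) {
            pthread_mutex_lock(&unreg_lock);
            struct reg *r = unreg_list;    /* pop one entry per iteration */
            if (r)
                unreg_list = r->next;
            pthread_mutex_unlock(&unreg_lock);
            if (!r)
                break;
            printf("unregistered %d\n", r->id);  /* notify driver outside the lock */
            free(r);
        }
    }

    int main(void) { queue_unreg(1); queue_unreg(2); process_unregistrations(); return 0; }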
H A D ibss.c 14 #include "rdev-ops.h"
61 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_ibss_joined() local
81 queue_work(cfg80211_wq, &rdev->event_work); cfg80211_ibss_joined()
85 static int __cfg80211_join_ibss(struct cfg80211_registered_device *rdev, __cfg80211_join_ibss() argument
105 rdev->wiphy.bands[params->chandef.chan->band]; __cfg80211_join_ibss()
127 err = rdev_join_ibss(rdev, dev, params); __cfg80211_join_ibss()
139 int cfg80211_join_ibss(struct cfg80211_registered_device *rdev, cfg80211_join_ibss() argument
150 err = __cfg80211_join_ibss(rdev, dev, params, connkeys); cfg80211_join_ibss()
159 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); __cfg80211_clear_ibss() local
167 rdev_set_qos_map(rdev, dev, NULL); __cfg80211_clear_ibss()
173 if (rdev->ops->del_key) __cfg80211_clear_ibss()
175 rdev_del_key(rdev, dev, i, false, NULL); __cfg80211_clear_ibss()
200 int __cfg80211_leave_ibss(struct cfg80211_registered_device *rdev, __cfg80211_leave_ibss() argument
211 err = rdev_leave_ibss(rdev, dev); __cfg80211_leave_ibss()
221 int cfg80211_leave_ibss(struct cfg80211_registered_device *rdev, cfg80211_leave_ibss() argument
228 err = __cfg80211_leave_ibss(rdev, dev, nowext); cfg80211_leave_ibss()
235 int cfg80211_ibss_wext_join(struct cfg80211_registered_device *rdev, cfg80211_ibss_wext_join() argument
255 sband = rdev->wiphy.bands[band]; cfg80211_ibss_wext_join()
301 err = __cfg80211_join_ibss(rdev, wdev->netdev, cfg80211_ibss_wext_join()
314 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_ibss_wext_siwfreq() local
322 if (!rdev->ops->join_ibss) cfg80211_ibss_wext_siwfreq()
344 err = __cfg80211_leave_ibss(rdev, dev, true); cfg80211_ibss_wext_siwfreq()
360 err = cfg80211_ibss_wext_join(rdev, wdev); cfg80211_ibss_wext_siwfreq()
399 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_ibss_wext_siwessid() local
407 if (!rdev->ops->join_ibss) cfg80211_ibss_wext_siwessid()
413 err = __cfg80211_leave_ibss(rdev, dev, true); cfg80211_ibss_wext_siwessid()
428 err = cfg80211_ibss_wext_join(rdev, wdev); cfg80211_ibss_wext_siwessid()
466 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_ibss_wext_siwap() local
474 if (!rdev->ops->join_ibss) cfg80211_ibss_wext_siwap()
499 err = __cfg80211_leave_ibss(rdev, dev, true); cfg80211_ibss_wext_siwap()
512 err = cfg80211_ibss_wext_join(rdev, wdev); cfg80211_ibss_wext_siwap()
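Note: the ibss.c wext handlers above (siwfreq/siwessid/siwap) all reconfigure by leaving the current IBSS and rejoining with the new parameter. A minimal sketch of that leave-then-rejoin idiom; the struct and helpers are hypothetical:

    #include <stdio.h>

    struct ibss { int joined; int channel; };

    static void leave(struct ibss *s) { s->joined = 0; }
    static void join(struct ibss *s)  { s->joined = 1; }

    static void set_channel(struct ibss *s, int ch)
    {
        if (s->joined)
            leave(s);              /* __cfg80211_leave_ibss() analogue */
        s->channel = ch;
        join(s);                   /* cfg80211_ibss_wext_join() analogue */
    }

    int main(void)
    {
        struct ibss s = { 0 };
        set_channel(&s, 6);
        set_channel(&s, 11);       /* leaves channel 6, rejoins on 11 */
        printf("joined=%d channel=%d\n", s.joined, s.channel);
        return 0;
    }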
H A D scan.c 21 #include "rdev-ops.h"
85 static inline void bss_ref_get(struct cfg80211_registered_device *rdev, bss_ref_get() argument
88 lockdep_assert_held(&rdev->bss_lock); bss_ref_get()
99 static inline void bss_ref_put(struct cfg80211_registered_device *rdev, bss_ref_put() argument
102 lockdep_assert_held(&rdev->bss_lock); bss_ref_put()
118 static bool __cfg80211_unlink_bss(struct cfg80211_registered_device *rdev, __cfg80211_unlink_bss() argument
121 lockdep_assert_held(&rdev->bss_lock); __cfg80211_unlink_bss()
138 rb_erase(&bss->rbn, &rdev->bss_tree); __cfg80211_unlink_bss()
139 bss_ref_put(rdev, bss); __cfg80211_unlink_bss()
143 static void __cfg80211_bss_expire(struct cfg80211_registered_device *rdev, __cfg80211_bss_expire() argument
149 lockdep_assert_held(&rdev->bss_lock); __cfg80211_bss_expire()
151 list_for_each_entry_safe(bss, tmp, &rdev->bss_list, list) { __cfg80211_bss_expire()
157 if (__cfg80211_unlink_bss(rdev, bss)) __cfg80211_bss_expire()
162 rdev->bss_generation++; __cfg80211_bss_expire()
165 void ___cfg80211_scan_done(struct cfg80211_registered_device *rdev, ___cfg80211_scan_done() argument
177 if (rdev->scan_msg) { ___cfg80211_scan_done()
178 nl80211_send_scan_result(rdev, rdev->scan_msg); ___cfg80211_scan_done()
179 rdev->scan_msg = NULL; ___cfg80211_scan_done()
183 request = rdev->scan_req; ___cfg80211_scan_done()
200 spin_lock_bh(&rdev->bss_lock); ___cfg80211_scan_done()
201 __cfg80211_bss_expire(rdev, request->scan_start); ___cfg80211_scan_done()
202 spin_unlock_bh(&rdev->bss_lock); ___cfg80211_scan_done()
205 msg = nl80211_build_scan_msg(rdev, wdev, request->aborted); ___cfg80211_scan_done()
218 rdev->scan_req = NULL; ___cfg80211_scan_done()
222 rdev->scan_msg = msg; ___cfg80211_scan_done()
224 nl80211_send_scan_result(rdev, msg); ___cfg80211_scan_done()
229 struct cfg80211_registered_device *rdev; __cfg80211_scan_done() local
231 rdev = container_of(wk, struct cfg80211_registered_device, __cfg80211_scan_done()
235 ___cfg80211_scan_done(rdev, true); __cfg80211_scan_done()
252 struct cfg80211_registered_device *rdev; __cfg80211_sched_scan_results() local
255 rdev = container_of(wk, struct cfg80211_registered_device, __cfg80211_sched_scan_results()
260 request = rtnl_dereference(rdev->sched_scan_req); __cfg80211_sched_scan_results()
266 spin_lock_bh(&rdev->bss_lock); __cfg80211_sched_scan_results()
267 __cfg80211_bss_expire(rdev, request->scan_start); __cfg80211_sched_scan_results()
268 spin_unlock_bh(&rdev->bss_lock); __cfg80211_sched_scan_results()
271 nl80211_send_sched_scan_results(rdev, request->dev); __cfg80211_sched_scan_results()
290 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_sched_scan_stopped_rtnl() local
296 __cfg80211_stop_sched_scan(rdev, true); cfg80211_sched_scan_stopped_rtnl()
308 int __cfg80211_stop_sched_scan(struct cfg80211_registered_device *rdev, __cfg80211_stop_sched_scan() argument
316 if (!rdev->sched_scan_req) __cfg80211_stop_sched_scan()
319 sched_scan_req = rtnl_dereference(rdev->sched_scan_req); __cfg80211_stop_sched_scan()
323 int err = rdev_sched_scan_stop(rdev, dev); __cfg80211_stop_sched_scan()
328 nl80211_send_sched_scan(rdev, dev, NL80211_CMD_SCHED_SCAN_STOPPED); __cfg80211_stop_sched_scan()
330 RCU_INIT_POINTER(rdev->sched_scan_req, NULL); __cfg80211_stop_sched_scan()
336 void cfg80211_bss_age(struct cfg80211_registered_device *rdev, cfg80211_bss_age() argument
342 spin_lock_bh(&rdev->bss_lock); cfg80211_bss_age()
343 list_for_each_entry(bss, &rdev->bss_list, list) cfg80211_bss_age()
345 spin_unlock_bh(&rdev->bss_lock); cfg80211_bss_age()
348 void cfg80211_bss_expire(struct cfg80211_registered_device *rdev) cfg80211_bss_expire() argument
350 __cfg80211_bss_expire(rdev, jiffies - IEEE80211_SCAN_RESULT_EXPIRE); cfg80211_bss_expire()
587 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_get_bss() local
595 spin_lock_bh(&rdev->bss_lock); cfg80211_get_bss()
597 list_for_each_entry(bss, &rdev->bss_list, list) { cfg80211_get_bss()
616 bss_ref_get(rdev, res); cfg80211_get_bss()
621 spin_unlock_bh(&rdev->bss_lock); cfg80211_get_bss()
629 static void rb_insert_bss(struct cfg80211_registered_device *rdev, rb_insert_bss() argument
632 struct rb_node **p = &rdev->bss_tree.rb_node; rb_insert_bss()
655 rb_insert_color(&bss->rbn, &rdev->bss_tree); rb_insert_bss()
659 rb_find_bss(struct cfg80211_registered_device *rdev, rb_find_bss() argument
663 struct rb_node *n = rdev->bss_tree.rb_node; rb_find_bss()
682 static bool cfg80211_combine_bsses(struct cfg80211_registered_device *rdev, cfg80211_combine_bsses() argument
712 list_for_each_entry(bss, &rdev->bss_list, list) { cfg80211_combine_bsses()
746 cfg80211_bss_update(struct cfg80211_registered_device *rdev, cfg80211_bss_update() argument
757 spin_lock_bh(&rdev->bss_lock); cfg80211_bss_update()
760 spin_unlock_bh(&rdev->bss_lock); cfg80211_bss_update()
764 found = rb_find_bss(rdev, tmp, BSS_CMP_REGULAR); cfg80211_bss_update()
852 new = kzalloc(sizeof(*new) + rdev->wiphy.bss_priv_size, cfg80211_bss_update()
868 hidden = rb_find_bss(rdev, tmp, BSS_CMP_HIDE_ZLEN); cfg80211_bss_update()
870 hidden = rb_find_bss(rdev, tmp, cfg80211_bss_update()
887 if (!cfg80211_combine_bsses(rdev, new)) { cfg80211_bss_update()
893 list_add_tail(&new->list, &rdev->bss_list); cfg80211_bss_update()
894 rb_insert_bss(rdev, new); cfg80211_bss_update()
898 rdev->bss_generation++; cfg80211_bss_update()
899 bss_ref_get(rdev, found); cfg80211_bss_update()
900 spin_unlock_bh(&rdev->bss_lock); cfg80211_bss_update()
904 spin_unlock_bh(&rdev->bss_lock); cfg80211_bss_update()
1110 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_ref_bss() local
1118 spin_lock_bh(&rdev->bss_lock); cfg80211_ref_bss()
1119 bss_ref_get(rdev, bss); cfg80211_ref_bss()
1120 spin_unlock_bh(&rdev->bss_lock); cfg80211_ref_bss()
1126 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_put_bss() local
1134 spin_lock_bh(&rdev->bss_lock); cfg80211_put_bss()
1135 bss_ref_put(rdev, bss); cfg80211_put_bss()
1136 spin_unlock_bh(&rdev->bss_lock); cfg80211_put_bss()
1142 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_unlink_bss() local
1150 spin_lock_bh(&rdev->bss_lock); cfg80211_unlink_bss()
1152 if (__cfg80211_unlink_bss(rdev, bss)) cfg80211_unlink_bss()
1153 rdev->bss_generation++; cfg80211_unlink_bss()
1155 spin_unlock_bh(&rdev->bss_lock); cfg80211_unlink_bss()
1163 struct cfg80211_registered_device *rdev; cfg80211_get_dev_from_ifindex() local
1172 rdev = wiphy_to_rdev(dev->ieee80211_ptr->wiphy); cfg80211_get_dev_from_ifindex()
1174 rdev = ERR_PTR(-ENODEV); cfg80211_get_dev_from_ifindex()
1176 return rdev; cfg80211_get_dev_from_ifindex()
1183 struct cfg80211_registered_device *rdev; cfg80211_wext_siwscan() local
1196 rdev = cfg80211_get_dev_from_ifindex(dev_net(dev), dev->ifindex); cfg80211_wext_siwscan()
1198 if (IS_ERR(rdev)) cfg80211_wext_siwscan()
1199 return PTR_ERR(rdev); cfg80211_wext_siwscan()
1201 if (rdev->scan_req || rdev->scan_msg) { cfg80211_wext_siwscan()
1206 wiphy = &rdev->wiphy; cfg80211_wext_siwscan()
1296 rdev->scan_req = creq; cfg80211_wext_siwscan()
1297 err = rdev_scan(rdev, creq); cfg80211_wext_siwscan()
1299 rdev->scan_req = NULL; cfg80211_wext_siwscan()
1302 nl80211_send_scan_start(rdev, dev->ieee80211_ptr); cfg80211_wext_siwscan()
1620 static int ieee80211_scan_results(struct cfg80211_registered_device *rdev, ieee80211_scan_results() argument
1629 spin_lock_bh(&rdev->bss_lock); ieee80211_scan_results()
1630 cfg80211_bss_expire(rdev); ieee80211_scan_results()
1632 list_for_each_entry(bss, &rdev->bss_list, list) { ieee80211_scan_results()
1637 current_ev = ieee80211_bss(&rdev->wiphy, info, bss, ieee80211_scan_results()
1644 spin_unlock_bh(&rdev->bss_lock); ieee80211_scan_results()
1656 struct cfg80211_registered_device *rdev; cfg80211_wext_giwscan() local
1662 rdev = cfg80211_get_dev_from_ifindex(dev_net(dev), dev->ifindex); cfg80211_wext_giwscan()
1664 if (IS_ERR(rdev)) cfg80211_wext_giwscan()
1665 return PTR_ERR(rdev); cfg80211_wext_giwscan()
1667 if (rdev->scan_req || rdev->scan_msg) cfg80211_wext_giwscan()
1670 res = ieee80211_scan_results(rdev, info, extra, data->length); cfg80211_wext_giwscan()
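Note: __cfg80211_bss_expire() above sweeps the BSS list under bss_lock, unlinking stale entries and bumping rdev->bss_generation only if something was removed. A standalone sketch of that sweep; struct table and expire() are hypothetical:

    #include <stdio.h>
    #include <stdlib.h>

    struct bss { struct bss *next; long ts; };

    struct table { struct bss *head; unsigned generation; };

    static void expire(struct table *t, long cutoff)
    {
        struct bss **pp = &t->head;
        int expired = 0;
        while (*pp) {
            struct bss *b = *pp;
            if (b->ts < cutoff) {          /* older than the cutoff: unlink */
                *pp = b->next;
                free(b);
                expired = 1;
            } else {
                pp = &b->next;
            }
        }
        if (expired)
            t->generation++;               /* like rdev->bss_generation++ */
    }

    int main(void)
    {
        struct table t = { 0 };
        struct bss *b = calloc(1, sizeof(*b));
        b->ts = 10;
        t.head = b;
        expire(&t, 100);                   /* everything before ts=100 goes away */
        printf("generation=%u empty=%d\n", t.generation, t.head == NULL);
        return 0;
    }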
H A D nl80211.c 28 #include "rdev-ops.h"
30 static int nl80211_crypto_settings(struct cfg80211_registered_device *rdev,
77 struct cfg80211_registered_device *rdev; __cfg80211_wdev_from_attrs() local
97 list_for_each_entry(rdev, &cfg80211_rdev_list, list) { __cfg80211_wdev_from_attrs()
100 if (wiphy_net(&rdev->wiphy) != netns) __cfg80211_wdev_from_attrs()
103 if (have_wdev_id && rdev->wiphy_idx != wiphy_idx) __cfg80211_wdev_from_attrs()
106 list_for_each_entry(wdev, &rdev->wdev_list, list) { __cfg80211_wdev_from_attrs()
130 struct cfg80211_registered_device *rdev = NULL, *tmp; __cfg80211_rdev_from_attrs() local
141 rdev = cfg80211_rdev_by_wiphy_idx( __cfg80211_rdev_from_attrs()
162 if (rdev && tmp != rdev) __cfg80211_rdev_from_attrs()
164 rdev = tmp; __cfg80211_rdev_from_attrs()
183 if (rdev && tmp != rdev) __cfg80211_rdev_from_attrs()
186 rdev = tmp; __cfg80211_rdev_from_attrs()
190 if (!rdev) __cfg80211_rdev_from_attrs()
193 if (netns != wiphy_net(&rdev->wiphy)) __cfg80211_rdev_from_attrs()
196 return rdev; __cfg80211_rdev_from_attrs()
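Note: __cfg80211_rdev_from_attrs() above resolves the device from several alternative attributes and fails when two attributes name different devices. A sketch of that consistency check with hypothetical lookup/resolve helpers:

    #include <stdio.h>

    struct dev { int id; };

    static struct dev devs[] = { { 1 }, { 2 } };

    static struct dev *lookup(int id)
    {
        for (unsigned i = 0; i < sizeof(devs) / sizeof(devs[0]); i++)
            if (devs[i].id == id)
                return &devs[i];
        return NULL;
    }

    /* resolve from up to two attributes; -1 means "attribute absent" */
    static struct dev *resolve(int attr_a, int attr_b)
    {
        struct dev *rdev = NULL;

        if (attr_a != -1)
            rdev = lookup(attr_a);
        if (attr_b != -1) {
            struct dev *tmp = lookup(attr_b);
            if (rdev && tmp != rdev)
                return NULL;       /* attributes disagree, like the error path */
            rdev = tmp;
        }
        return rdev;               /* NULL maps to -ENODEV in the original */
    }

    int main(void)
    {
        printf("%s\n", resolve(1, 2) ? "ok" : "mismatch/absent");
        return 0;
    }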
490 struct cfg80211_registered_device **rdev, nl80211_prepare_wdev_dump()
510 *rdev = wiphy_to_rdev((*wdev)->wiphy); nl80211_prepare_wdev_dump()
512 cb->args[0] = (*rdev)->wiphy_idx + 1; nl80211_prepare_wdev_dump()
523 *rdev = wiphy_to_rdev(wiphy); nl80211_prepare_wdev_dump()
526 list_for_each_entry(tmp, &(*rdev)->wdev_list, list) { nl80211_prepare_wdev_dump()
545 static void nl80211_finish_wdev_dump(struct cfg80211_registered_device *rdev) nl80211_finish_wdev_dump() argument
826 nl80211_parse_connkeys(struct cfg80211_registered_device *rdev, nl80211_parse_connkeys() argument
862 err = cfg80211_validate_key_settings(rdev, &parse.p, nla_for_each_nested()
1017 static int nl80211_send_wowlan_tcp_caps(struct cfg80211_registered_device *rdev, nl80211_send_wowlan_tcp_caps() argument
1020 const struct wiphy_wowlan_tcp_support *tcp = rdev->wiphy.wowlan->tcp; nl80211_send_wowlan_tcp_caps()
1058 struct cfg80211_registered_device *rdev, nl80211_send_wowlan()
1063 if (!rdev->wiphy.wowlan) nl80211_send_wowlan()
1070 if (((rdev->wiphy.wowlan->flags & WIPHY_WOWLAN_ANY) && nl80211_send_wowlan()
1072 ((rdev->wiphy.wowlan->flags & WIPHY_WOWLAN_DISCONNECT) && nl80211_send_wowlan()
1074 ((rdev->wiphy.wowlan->flags & WIPHY_WOWLAN_MAGIC_PKT) && nl80211_send_wowlan()
1076 ((rdev->wiphy.wowlan->flags & WIPHY_WOWLAN_SUPPORTS_GTK_REKEY) && nl80211_send_wowlan()
1078 ((rdev->wiphy.wowlan->flags & WIPHY_WOWLAN_GTK_REKEY_FAILURE) && nl80211_send_wowlan()
1080 ((rdev->wiphy.wowlan->flags & WIPHY_WOWLAN_EAP_IDENTITY_REQ) && nl80211_send_wowlan()
1082 ((rdev->wiphy.wowlan->flags & WIPHY_WOWLAN_4WAY_HANDSHAKE) && nl80211_send_wowlan()
1084 ((rdev->wiphy.wowlan->flags & WIPHY_WOWLAN_RFKILL_RELEASE) && nl80211_send_wowlan()
1088 if (rdev->wiphy.wowlan->n_patterns) { nl80211_send_wowlan()
1090 .max_patterns = rdev->wiphy.wowlan->n_patterns, nl80211_send_wowlan()
1091 .min_pattern_len = rdev->wiphy.wowlan->pattern_min_len, nl80211_send_wowlan()
1092 .max_pattern_len = rdev->wiphy.wowlan->pattern_max_len, nl80211_send_wowlan()
1093 .max_pkt_offset = rdev->wiphy.wowlan->max_pkt_offset, nl80211_send_wowlan()
1101 if ((rdev->wiphy.wowlan->flags & WIPHY_WOWLAN_NET_DETECT) && nl80211_send_wowlan()
1103 rdev->wiphy.wowlan->max_nd_match_sets)) nl80211_send_wowlan()
1106 if (large && nl80211_send_wowlan_tcp_caps(rdev, msg)) nl80211_send_wowlan()
1116 struct cfg80211_registered_device *rdev) nl80211_send_coalesce()
1120 if (!rdev->wiphy.coalesce) nl80211_send_coalesce()
1123 rule.max_rules = rdev->wiphy.coalesce->n_rules; nl80211_send_coalesce()
1124 rule.max_delay = rdev->wiphy.coalesce->max_delay; nl80211_send_coalesce()
1125 rule.pat.max_patterns = rdev->wiphy.coalesce->n_patterns; nl80211_send_coalesce()
1126 rule.pat.min_pattern_len = rdev->wiphy.coalesce->pattern_min_len; nl80211_send_coalesce()
1127 rule.pat.max_pattern_len = rdev->wiphy.coalesce->pattern_max_len; nl80211_send_coalesce()
1128 rule.pat.max_pkt_offset = rdev->wiphy.coalesce->max_pkt_offset; nl80211_send_coalesce()
1259 static int nl80211_send_wiphy(struct cfg80211_registered_device *rdev, nl80211_send_wiphy() argument
1272 rdev->wiphy.mgmt_stypes; nl80211_send_wiphy()
1282 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_send_wiphy()
1284 wiphy_name(&rdev->wiphy)) || nl80211_send_wiphy()
1295 rdev->wiphy.retry_short) || nl80211_send_wiphy()
1297 rdev->wiphy.retry_long) || nl80211_send_wiphy()
1299 rdev->wiphy.frag_threshold) || nl80211_send_wiphy()
1301 rdev->wiphy.rts_threshold) || nl80211_send_wiphy()
1303 rdev->wiphy.coverage_class) || nl80211_send_wiphy()
1305 rdev->wiphy.max_scan_ssids) || nl80211_send_wiphy()
1307 rdev->wiphy.max_sched_scan_ssids) || nl80211_send_wiphy()
1309 rdev->wiphy.max_scan_ie_len) || nl80211_send_wiphy()
1311 rdev->wiphy.max_sched_scan_ie_len) || nl80211_send_wiphy()
1313 rdev->wiphy.max_match_sets) || nl80211_send_wiphy()
1315 rdev->wiphy.max_sched_scan_plans) || nl80211_send_wiphy()
1317 rdev->wiphy.max_sched_scan_plan_interval) || nl80211_send_wiphy()
1319 rdev->wiphy.max_sched_scan_plan_iterations)) nl80211_send_wiphy()
1322 if ((rdev->wiphy.flags & WIPHY_FLAG_IBSS_RSN) && nl80211_send_wiphy()
1325 if ((rdev->wiphy.flags & WIPHY_FLAG_MESH_AUTH) && nl80211_send_wiphy()
1328 if ((rdev->wiphy.flags & WIPHY_FLAG_AP_UAPSD) && nl80211_send_wiphy()
1331 if ((rdev->wiphy.flags & WIPHY_FLAG_SUPPORTS_FW_ROAM) && nl80211_send_wiphy()
1334 if ((rdev->wiphy.flags & WIPHY_FLAG_SUPPORTS_TDLS) && nl80211_send_wiphy()
1337 if ((rdev->wiphy.flags & WIPHY_FLAG_TDLS_EXTERNAL_SETUP) && nl80211_send_wiphy()
1345 sizeof(u32) * rdev->wiphy.n_cipher_suites, nl80211_send_wiphy()
1346 rdev->wiphy.cipher_suites)) nl80211_send_wiphy()
1350 rdev->wiphy.max_num_pmkids)) nl80211_send_wiphy()
1353 if ((rdev->wiphy.flags & WIPHY_FLAG_CONTROL_PORT_PROTOCOL) && nl80211_send_wiphy()
1358 rdev->wiphy.available_antennas_tx) || nl80211_send_wiphy()
1360 rdev->wiphy.available_antennas_rx)) nl80211_send_wiphy()
1363 if ((rdev->wiphy.flags & WIPHY_FLAG_AP_PROBE_RESP_OFFLOAD) && nl80211_send_wiphy()
1365 rdev->wiphy.probe_resp_offload)) nl80211_send_wiphy()
1368 if ((rdev->wiphy.available_antennas_tx || nl80211_send_wiphy()
1369 rdev->wiphy.available_antennas_rx) && nl80211_send_wiphy()
1370 rdev->ops->get_antenna) { nl80211_send_wiphy()
1373 res = rdev_get_antenna(rdev, &tx_ant, &rx_ant); nl80211_send_wiphy()
1390 rdev->wiphy.interface_modes)) nl80211_send_wiphy()
1404 sband = rdev->wiphy.bands[band]; nl80211_send_wiphy()
1481 if (rdev->ops->op) { \ nl80211_send_wiphy()
1505 if (rdev->wiphy.flags & WIPHY_FLAG_HAS_REMAIN_ON_CHANNEL) nl80211_send_wiphy()
1510 if (rdev->wiphy.flags & WIPHY_FLAG_NETNS_OK) { nl80211_send_wiphy()
1515 if (rdev->ops->set_monitor_channel || rdev->ops->start_ap || nl80211_send_wiphy()
1516 rdev->ops->join_mesh) { nl80211_send_wiphy()
1522 if (rdev->wiphy.flags & WIPHY_FLAG_SUPPORTS_TDLS) { nl80211_send_wiphy()
1526 if (rdev->wiphy.flags & WIPHY_FLAG_SUPPORTS_SCHED_SCAN) nl80211_send_wiphy()
1530 if (rdev->wiphy.flags & WIPHY_FLAG_REPORTS_OBSS) { nl80211_send_wiphy()
1543 if (rdev->wiphy.flags & WIPHY_FLAG_HAS_CHANNEL_SWITCH) nl80211_send_wiphy()
1546 if (rdev->wiphy.features & nl80211_send_wiphy()
1553 if (rdev->ops->connect || rdev->ops->auth) { nl80211_send_wiphy()
1559 if (rdev->ops->disconnect || rdev->ops->deauth) { nl80211_send_wiphy()
1570 if (rdev->ops->remain_on_channel && nl80211_send_wiphy()
1571 (rdev->wiphy.flags & WIPHY_FLAG_HAS_REMAIN_ON_CHANNEL) && nl80211_send_wiphy()
1574 rdev->wiphy.max_remain_on_channel_duration)) nl80211_send_wiphy()
1577 if ((rdev->wiphy.flags & WIPHY_FLAG_OFFCHAN_TX) && nl80211_send_wiphy()
1588 if (nl80211_send_wowlan(msg, rdev, state->split)) nl80211_send_wiphy()
1598 rdev->wiphy.software_iftypes)) nl80211_send_wiphy()
1601 if (nl80211_put_iface_combinations(&rdev->wiphy, msg, nl80211_send_wiphy()
1609 if ((rdev->wiphy.flags & WIPHY_FLAG_HAVE_AP_SME) && nl80211_send_wiphy()
1611 rdev->wiphy.ap_sme_capa)) nl80211_send_wiphy()
1614 features = rdev->wiphy.features; nl80211_send_wiphy()
1625 if (rdev->wiphy.ht_capa_mod_mask && nl80211_send_wiphy()
1627 sizeof(*rdev->wiphy.ht_capa_mod_mask), nl80211_send_wiphy()
1628 rdev->wiphy.ht_capa_mod_mask)) nl80211_send_wiphy()
1631 if (rdev->wiphy.flags & WIPHY_FLAG_HAVE_AP_SME && nl80211_send_wiphy()
1632 rdev->wiphy.max_acl_mac_addrs && nl80211_send_wiphy()
1634 rdev->wiphy.max_acl_mac_addrs)) nl80211_send_wiphy()
1650 if (rdev->wiphy.extended_capabilities && nl80211_send_wiphy()
1652 rdev->wiphy.extended_capabilities_len, nl80211_send_wiphy()
1653 rdev->wiphy.extended_capabilities) || nl80211_send_wiphy()
1655 rdev->wiphy.extended_capabilities_len, nl80211_send_wiphy()
1656 rdev->wiphy.extended_capabilities_mask))) nl80211_send_wiphy()
1659 if (rdev->wiphy.vht_capa_mod_mask && nl80211_send_wiphy()
1661 sizeof(*rdev->wiphy.vht_capa_mod_mask), nl80211_send_wiphy()
1662 rdev->wiphy.vht_capa_mod_mask)) nl80211_send_wiphy()
1668 if (nl80211_send_coalesce(msg, rdev)) nl80211_send_wiphy()
1671 if ((rdev->wiphy.flags & WIPHY_FLAG_SUPPORTS_5_10_MHZ) && nl80211_send_wiphy()
1676 if (rdev->wiphy.max_ap_assoc_sta && nl80211_send_wiphy()
1678 rdev->wiphy.max_ap_assoc_sta)) nl80211_send_wiphy()
1684 if (rdev->wiphy.n_vendor_commands) { nl80211_send_wiphy()
1692 for (i = 0; i < rdev->wiphy.n_vendor_commands; i++) { nl80211_send_wiphy()
1693 info = &rdev->wiphy.vendor_commands[i].info; nl80211_send_wiphy()
1700 if (rdev->wiphy.n_vendor_events) { nl80211_send_wiphy()
1709 for (i = 0; i < rdev->wiphy.n_vendor_events; i++) { nl80211_send_wiphy()
1710 info = &rdev->wiphy.vendor_events[i]; nl80211_send_wiphy()
1719 if (rdev->wiphy.flags & WIPHY_FLAG_HAS_CHANNEL_SWITCH && nl80211_send_wiphy()
1721 rdev->wiphy.max_num_csa_counters)) nl80211_send_wiphy()
1724 if (rdev->wiphy.regulatory_flags & REGULATORY_WIPHY_SELF_MANAGED && nl80211_send_wiphy()
1729 sizeof(rdev->wiphy.ext_features), nl80211_send_wiphy()
1730 rdev->wiphy.ext_features)) nl80211_send_wiphy()
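Note: nl80211_send_wiphy() above chains dozens of attribute puts with '||' and jumps to a failure label as soon as one put overflows the message. A small sketch of that short-circuiting builder; put()/build() are stand-ins for nla_put()/the nla_put_failure label, not the real API:

    #include <stdio.h>
    #include <string.h>

    struct msg { char buf[16]; size_t len; };

    static int put(struct msg *m, const char *s)   /* 0 on success, like nla_put */
    {
        size_t n = strlen(s);
        if (m->len + n > sizeof(m->buf))
            return -1;                             /* no room: fail the whole message */
        memcpy(m->buf + m->len, s, n);
        m->len += n;
        return 0;
    }

    static int build(struct msg *m)
    {
        if (put(m, "idx") ||                       /* the '||' chain short-circuits */
            put(m, "name") ||
            put(m, "bands"))
            goto nla_put_failure;
        return 0;
    nla_put_failure:
        return -1;                                 /* caller discards the message */
    }

    int main(void)
    {
        struct msg m = { .len = 0 };
        printf("build: %d\n", build(&m));
        return 0;
    }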
1764 struct cfg80211_registered_device *rdev; nl80211_dump_wiphy_parse() local
1771 rdev = wiphy_to_rdev( nl80211_dump_wiphy_parse()
1773 state->filter_wiphy = rdev->wiphy_idx; nl80211_dump_wiphy_parse()
1784 struct cfg80211_registered_device *rdev; nl80211_dump_wiphy() local
1803 list_for_each_entry(rdev, &cfg80211_rdev_list, list) { nl80211_dump_wiphy()
1804 if (!net_eq(wiphy_net(&rdev->wiphy), sock_net(skb->sk))) nl80211_dump_wiphy()
1809 state->filter_wiphy != rdev->wiphy_idx) nl80211_dump_wiphy()
1813 ret = nl80211_send_wiphy(rdev, NL80211_CMD_NEW_WIPHY, nl80211_dump_wiphy()
1862 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_get_wiphy() local
1869 if (nl80211_send_wiphy(rdev, NL80211_CMD_NEW_WIPHY, msg, nl80211_get_wiphy()
1931 static int nl80211_parse_chandef(struct cfg80211_registered_device *rdev, nl80211_parse_chandef() argument
1942 chandef->chan = ieee80211_get_channel(&rdev->wiphy, control_freq); nl80211_parse_chandef()
1984 if (!cfg80211_chandef_usable(&rdev->wiphy, chandef, nl80211_parse_chandef()
1990 !(rdev->wiphy.flags & WIPHY_FLAG_SUPPORTS_5_10_MHZ)) nl80211_parse_chandef()
1996 static int __nl80211_set_channel(struct cfg80211_registered_device *rdev, __nl80211_set_channel() argument
2012 result = nl80211_parse_chandef(rdev, info, &chandef); __nl80211_set_channel()
2019 if (!cfg80211_reg_can_beacon_relax(&rdev->wiphy, &chandef, __nl80211_set_channel()
2025 if (!dev || !rdev->ops->set_ap_chanwidth || __nl80211_set_channel()
2026 !(rdev->wiphy.features & __nl80211_set_channel()
2037 result = rdev_set_ap_chanwidth(rdev, dev, &chandef); __nl80211_set_channel()
2045 result = cfg80211_set_mesh_channel(rdev, wdev, &chandef); __nl80211_set_channel()
2048 result = cfg80211_set_monitor_channel(rdev, &chandef); __nl80211_set_channel()
2059 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_channel() local
2062 return __nl80211_set_channel(rdev, netdev, info); nl80211_set_channel()
2067 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_wds_peer() local
2078 if (!rdev->ops->set_wds_peer) nl80211_set_wds_peer()
2085 return rdev_set_wds_peer(rdev, dev, bssid); nl80211_set_wds_peer()
2091 struct cfg80211_registered_device *rdev; nl80211_set_wiphy() local
2118 rdev = wiphy_to_rdev(netdev->ieee80211_ptr->wiphy); nl80211_set_wiphy()
2124 rdev = __cfg80211_rdev_from_attrs(genl_info_net(info), nl80211_set_wiphy()
2126 if (IS_ERR(rdev)) nl80211_set_wiphy()
2127 return PTR_ERR(rdev); nl80211_set_wiphy()
2135 * end workaround code, by now the rdev is available nl80211_set_wiphy()
2141 rdev, nla_data(info->attrs[NL80211_ATTR_WIPHY_NAME])); nl80211_set_wiphy()
2150 if (!rdev->ops->set_txq_params) nl80211_set_wiphy()
2176 result = rdev_set_txq_params(rdev, netdev, nl80211_set_wiphy()
2185 rdev, nl80211_set_wiphy()
2197 if (!(rdev->wiphy.features & NL80211_FEATURE_VIF_TXPOWER)) nl80211_set_wiphy()
2200 if (!rdev->ops->set_tx_power) nl80211_set_wiphy()
2215 result = rdev_set_tx_power(rdev, txp_wdev, type, mbm); nl80211_set_wiphy()
2223 if ((!rdev->wiphy.available_antennas_tx && nl80211_set_wiphy()
2224 !rdev->wiphy.available_antennas_rx) || nl80211_set_wiphy()
2225 !rdev->ops->set_antenna) nl80211_set_wiphy()
2233 if ((~tx_ant && (tx_ant & ~rdev->wiphy.available_antennas_tx)) || nl80211_set_wiphy()
2234 (~rx_ant && (rx_ant & ~rdev->wiphy.available_antennas_rx))) nl80211_set_wiphy()
2237 tx_ant = tx_ant & rdev->wiphy.available_antennas_tx; nl80211_set_wiphy()
2238 rx_ant = rx_ant & rdev->wiphy.available_antennas_rx; nl80211_set_wiphy()
2240 result = rdev_set_antenna(rdev, tx_ant, rx_ant); nl80211_set_wiphy()
2299 if (!(rdev->wiphy.features & NL80211_FEATURE_ACKTO_ESTIMATION)) nl80211_set_wiphy()
2310 if (!rdev->ops->set_wiphy_params) nl80211_set_wiphy()
2313 old_retry_short = rdev->wiphy.retry_short; nl80211_set_wiphy()
2314 old_retry_long = rdev->wiphy.retry_long; nl80211_set_wiphy()
2315 old_frag_threshold = rdev->wiphy.frag_threshold; nl80211_set_wiphy()
2316 old_rts_threshold = rdev->wiphy.rts_threshold; nl80211_set_wiphy()
2317 old_coverage_class = rdev->wiphy.coverage_class; nl80211_set_wiphy()
2320 rdev->wiphy.retry_short = retry_short; nl80211_set_wiphy()
2322 rdev->wiphy.retry_long = retry_long; nl80211_set_wiphy()
2324 rdev->wiphy.frag_threshold = frag_threshold; nl80211_set_wiphy()
2326 rdev->wiphy.rts_threshold = rts_threshold; nl80211_set_wiphy()
2328 rdev->wiphy.coverage_class = coverage_class; nl80211_set_wiphy()
2330 result = rdev_set_wiphy_params(rdev, changed); nl80211_set_wiphy()
2332 rdev->wiphy.retry_short = old_retry_short; nl80211_set_wiphy()
2333 rdev->wiphy.retry_long = old_retry_long; nl80211_set_wiphy()
2334 rdev->wiphy.frag_threshold = old_frag_threshold; nl80211_set_wiphy()
2335 rdev->wiphy.rts_threshold = old_rts_threshold; nl80211_set_wiphy()
2336 rdev->wiphy.coverage_class = old_coverage_class; nl80211_set_wiphy()
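Note: nl80211_set_wiphy() above snapshots the old retry/frag/rts/coverage values, writes the new ones, and restores every field if rdev_set_wiphy_params() fails. A compact sketch of that save/apply/rollback idiom with hypothetical types:

    #include <stdio.h>

    struct params { int retry_short; int retry_long; };

    static int driver_apply(struct params *p)
    {
        (void)p;
        return -1;                 /* simulate a driver rejecting the change */
    }

    static int set_params(struct params *p, int rs, int rl)
    {
        struct params old = *p;    /* snapshot, like old_retry_short & friends */
        p->retry_short = rs;
        p->retry_long = rl;
        int err = driver_apply(p);
        if (err)
            *p = old;              /* roll every field back on failure */
        return err;
    }

    int main(void)
    {
        struct params p = { 3, 4 };
        set_params(&p, 7, 8);
        printf("retry_short=%d retry_long=%d\n", p.retry_short, p.retry_long);
        return 0;
    }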
2380 struct cfg80211_registered_device *rdev, nl80211_send_iface()
2399 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_send_iface()
2404 rdev->devlist_generation ^ nl80211_send_iface()
2408 if (rdev->ops->get_channel) { nl80211_send_iface()
2412 ret = rdev_get_channel(rdev, wdev, &chandef); nl80211_send_iface()
2419 if (rdev->ops->get_tx_power) { nl80211_send_iface()
2422 ret = rdev_get_tx_power(rdev, wdev, &dbm); nl80211_send_iface()
2448 struct cfg80211_registered_device *rdev; nl80211_dump_interface() local
2452 list_for_each_entry(rdev, &cfg80211_rdev_list, list) { nl80211_dump_interface()
2453 if (!net_eq(wiphy_net(&rdev->wiphy), sock_net(skb->sk))) nl80211_dump_interface()
2461 list_for_each_entry(wdev, &rdev->wdev_list, list) { nl80211_dump_interface()
2468 rdev, wdev, false) < 0) { nl80211_dump_interface()
2488 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_get_interface() local
2496 rdev, wdev, false) < 0) { nl80211_get_interface()
2534 static int nl80211_valid_4addr(struct cfg80211_registered_device *rdev, nl80211_valid_4addr() argument
2546 if (rdev->wiphy.flags & WIPHY_FLAG_4ADDR_AP) nl80211_valid_4addr()
2550 if (rdev->wiphy.flags & WIPHY_FLAG_4ADDR_STATION) nl80211_valid_4addr()
2562 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_interface() local
2603 err = nl80211_valid_4addr(rdev, dev, params.use_4addr, ntype); nl80211_set_interface()
2623 !(rdev->wiphy.features & NL80211_FEATURE_ACTIVE_MONITOR)) nl80211_set_interface()
2627 err = cfg80211_change_iface(rdev, dev, ntype, flags, &params); nl80211_set_interface()
2639 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_new_interface() local
2648 cfg80211_destroy_ifaces(rdev); nl80211_new_interface()
2661 if (!rdev->ops->add_virtual_intf || nl80211_new_interface()
2662 !(rdev->wiphy.interface_modes & (1 << type))) nl80211_new_interface()
2666 rdev->wiphy.features & NL80211_FEATURE_MAC_ON_CREATE) && nl80211_new_interface()
2676 err = nl80211_valid_4addr(rdev, NULL, params.use_4addr, type); nl80211_new_interface()
2686 !(rdev->wiphy.features & NL80211_FEATURE_ACTIVE_MONITOR)) nl80211_new_interface()
2693 wdev = rdev_add_virtual_intf(rdev, nl80211_new_interface()
2732 wdev->identifier = ++rdev->wdev_id; nl80211_new_interface()
2733 list_add_rcu(&wdev->list, &rdev->wdev_list); nl80211_new_interface()
2734 rdev->devlist_generation++; nl80211_new_interface()
2741 rdev, wdev, false) < 0) { nl80211_new_interface()
2749 rdev, wdev, false) < 0) { nl80211_new_interface()
2754 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), nl80211_new_interface()
2765 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_del_interface() local
2770 if (!rdev->ops->del_virtual_intf) nl80211_del_interface()
2774 if (msg && nl80211_send_iface(msg, 0, 0, 0, rdev, wdev, true) < 0) { nl80211_del_interface()
2789 status = rdev_del_virtual_intf(rdev, wdev); nl80211_del_interface()
2791 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), nl80211_del_interface()
2802 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_noack_map() local
2809 if (!rdev->ops->set_noack_map) nl80211_set_noack_map()
2814 return rdev_set_noack_map(rdev, dev, noack_map); nl80211_set_noack_map()
2866 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_get_key() local
2898 if (!rdev->ops->get_key) nl80211_get_key()
2901 if (!pairwise && mac_addr && !(rdev->wiphy.flags & WIPHY_FLAG_IBSS_RSN)) nl80211_get_key()
2923 err = rdev_get_key(rdev, dev, key_idx, pairwise, mac_addr, &cookie, nl80211_get_key()
2944 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_key() local
2963 if (!rdev->ops->set_default_key) { nl80211_set_key()
2972 err = rdev_set_default_key(rdev, dev, key.idx, nl80211_set_key()
2987 if (!rdev->ops->set_default_mgmt_key) { nl80211_set_key()
2996 err = rdev_set_default_mgmt_key(rdev, dev, key.idx); nl80211_set_key()
3013 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_new_key() local
3041 if (!rdev->ops->add_key) nl80211_new_key()
3044 if (cfg80211_validate_key_settings(rdev, &key.p, key.idx, nl80211_new_key()
3052 err = rdev_add_key(rdev, dev, key.idx, nl80211_new_key()
3062 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_del_key() local
3087 if (!rdev->ops->del_key) nl80211_del_key()
3094 !(rdev->wiphy.flags & WIPHY_FLAG_IBSS_RSN)) nl80211_del_key()
3098 err = rdev_del_key(rdev, dev, key.idx, nl80211_del_key()
3183 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_mac_acl() local
3195 acl = parse_acl_data(&rdev->wiphy, info); nl80211_set_mac_acl()
3199 err = rdev_set_mac_acl(rdev, dev, acl); nl80211_set_mac_acl()
3263 static bool nl80211_get_ap_channel(struct cfg80211_registered_device *rdev, nl80211_get_ap_channel() argument
3269 list_for_each_entry(wdev, &rdev->wdev_list, list) { nl80211_get_ap_channel()
3285 static bool nl80211_valid_auth_type(struct cfg80211_registered_device *rdev, nl80211_valid_auth_type() argument
3294 if (!(rdev->wiphy.features & NL80211_FEATURE_SAE) && nl80211_valid_auth_type()
3311 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_start_ap() local
3321 if (!rdev->ops->start_ap) nl80211_start_ap()
3344 err = cfg80211_validate_beacon_int(rdev, params.beacon_interval); nl80211_start_ap()
3378 if (!nl80211_valid_auth_type(rdev, params.auth_type, nl80211_start_ap()
3384 err = nl80211_crypto_settings(rdev, info, &params.crypto, nl80211_start_ap()
3390 if (!(rdev->wiphy.features & NL80211_FEATURE_INACTIVITY_TIMER)) nl80211_start_ap()
3404 !(rdev->wiphy.features & NL80211_FEATURE_P2P_GO_CTWIN)) nl80211_start_ap()
3418 !(rdev->wiphy.features & NL80211_FEATURE_P2P_GO_OPPPS)) nl80211_start_ap()
3423 err = nl80211_parse_chandef(rdev, info, &params.chandef); nl80211_start_ap()
3428 } else if (!nl80211_get_ap_channel(rdev, &params)) nl80211_start_ap()
3431 if (!cfg80211_reg_can_beacon_relax(&rdev->wiphy, &params.chandef, nl80211_start_ap()
3442 if (!(rdev->wiphy.features & nl80211_start_ap()
3447 if (!(rdev->wiphy.features & nl80211_start_ap()
3459 params.acl = parse_acl_data(&rdev->wiphy, info); nl80211_start_ap()
3465 err = rdev_start_ap(rdev, dev, &params); nl80211_start_ap()
3482 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_beacon() local
3492 if (!rdev->ops->change_beacon) nl80211_set_beacon()
3503 err = rdev_change_beacon(rdev, dev, &params); nl80211_set_beacon()
3511 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_stop_ap() local
3514 return cfg80211_stop_ap(rdev, dev, false); nl80211_stop_ap()
3701 struct cfg80211_registered_device *rdev, nl80211_send_station()
3749 switch (rdev->wiphy.signal_type) { nl80211_send_station()
3881 struct cfg80211_registered_device *rdev; nl80211_dump_station() local
3887 err = nl80211_prepare_wdev_dump(skb, cb, &rdev, &wdev); nl80211_dump_station()
3896 if (!rdev->ops->dump_station) { nl80211_dump_station()
3903 err = rdev_dump_station(rdev, wdev->netdev, sta_idx, nl80211_dump_station()
3913 rdev, wdev->netdev, mac_addr, nl80211_dump_station()
3925 nl80211_finish_wdev_dump(rdev); nl80211_dump_station()
3932 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_get_station() local
3946 if (!rdev->ops->get_station) nl80211_get_station()
3949 err = rdev_get_station(rdev, dev, mac_addr, &sinfo); nl80211_get_station()
3959 rdev, dev, mac_addr, &sinfo) < 0) { nl80211_get_station()
4109 struct cfg80211_registered_device *rdev) get_vlan()
4122 if (!v->ieee80211_ptr || v->ieee80211_ptr->wiphy != &rdev->wiphy) { get_vlan()
4243 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_station() local
4251 if (!rdev->ops->change_station) nl80211_set_station()
4327 params.vlan = get_vlan(info, rdev); nl80211_set_station()
4346 err = rdev_change_station(rdev, dev, mac_addr, &params); nl80211_set_station()
4357 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_new_station() local
4365 if (!rdev->ops->add_station) nl80211_new_station()
4459 if (!(rdev->wiphy.flags & WIPHY_FLAG_AP_UAPSD) || nl80211_new_station()
4471 if (!(rdev->wiphy.features & nl80211_new_station()
4479 params.vlan = get_vlan(info, rdev); nl80211_new_station()
4509 if (!(rdev->wiphy.flags & WIPHY_FLAG_SUPPORTS_TDLS)) nl80211_new_station()
4512 if (!(rdev->wiphy.flags & WIPHY_FLAG_TDLS_EXTERNAL_SETUP)) nl80211_new_station()
4526 err = rdev_add_station(rdev, dev, mac_addr, &params); nl80211_new_station()
4535 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_del_station() local
4550 if (!rdev->ops->del_station) nl80211_del_station()
4574 return rdev_del_station(rdev, dev, &params); nl80211_del_station()
4635 struct cfg80211_registered_device *rdev; nl80211_dump_mpath() local
4642 err = nl80211_prepare_wdev_dump(skb, cb, &rdev, &wdev); nl80211_dump_mpath()
4646 if (!rdev->ops->dump_mpath) { nl80211_dump_mpath()
4657 err = rdev_dump_mpath(rdev, wdev->netdev, path_idx, dst, nl80211_dump_mpath()
4678 nl80211_finish_wdev_dump(rdev); nl80211_dump_mpath()
4684 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_get_mpath() local
4699 if (!rdev->ops->get_mpath) nl80211_get_mpath()
4705 err = rdev_get_mpath(rdev, dev, dst, next_hop, &pinfo); nl80211_get_mpath()
4724 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_mpath() local
4738 if (!rdev->ops->change_mpath) nl80211_set_mpath()
4744 return rdev_change_mpath(rdev, dev, dst, next_hop); nl80211_set_mpath()
4749 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_new_mpath() local
4763 if (!rdev->ops->add_mpath) nl80211_new_mpath()
4769 return rdev_add_mpath(rdev, dev, dst, next_hop); nl80211_new_mpath()
4774 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_del_mpath() local
4781 if (!rdev->ops->del_mpath) nl80211_del_mpath()
4784 return rdev_del_mpath(rdev, dev, dst); nl80211_del_mpath()
4789 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_get_mpp() local
4804 if (!rdev->ops->get_mpp) nl80211_get_mpp()
4810 err = rdev_get_mpp(rdev, dev, dst, mpp, &pinfo); nl80211_get_mpp()
4831 struct cfg80211_registered_device *rdev; nl80211_dump_mpp() local
4838 err = nl80211_prepare_wdev_dump(skb, cb, &rdev, &wdev); nl80211_dump_mpp()
4842 if (!rdev->ops->dump_mpp) { nl80211_dump_mpp()
4853 err = rdev_dump_mpp(rdev, wdev->netdev, path_idx, dst, nl80211_dump_mpp()
4873 nl80211_finish_wdev_dump(rdev); nl80211_dump_mpp()
4879 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_bss() local
4924 !(rdev->wiphy.features & NL80211_FEATURE_P2P_GO_CTWIN)) nl80211_set_bss()
4938 !(rdev->wiphy.features & NL80211_FEATURE_P2P_GO_OPPPS)) nl80211_set_bss()
4942 if (!rdev->ops->change_bss) nl80211_set_bss()
4950 err = rdev_change_bss(rdev, dev, &params); nl80211_set_bss()
5005 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_get_mesh_config() local
5017 if (!rdev->ops->get_mesh_config) nl80211_get_mesh_config()
5025 err = rdev_get_mesh_config(rdev, dev, &cur_params); nl80211_get_mesh_config()
5291 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_parse_mesh_setup() local
5329 !(rdev->wiphy.features & NL80211_FEATURE_USERSPACE_MPM)) nl80211_parse_mesh_setup()
5350 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_update_mesh_config() local
5360 if (!rdev->ops->update_mesh_config) nl80211_update_mesh_config()
5372 err = rdev_update_mesh_config(rdev, dev, mask, &cfg); nl80211_update_mesh_config()
5443 struct cfg80211_registered_device *rdev; nl80211_get_reg_do() local
5460 rdev = cfg80211_get_dev_from_info(genl_info_net(info), info); nl80211_get_reg_do()
5461 if (IS_ERR(rdev)) { nl80211_get_reg_do()
5463 return PTR_ERR(rdev); nl80211_get_reg_do()
5466 wiphy = &rdev->wiphy; nl80211_get_reg_do()
5549 struct cfg80211_registered_device *rdev; nl80211_get_reg_dump() local
5564 list_for_each_entry(rdev, &cfg80211_rdev_list, list) { nl80211_get_reg_dump()
5565 regdom = get_wiphy_regdom(&rdev->wiphy); nl80211_get_reg_dump()
5573 NLM_F_MULTI, &rdev->wiphy, regdom); nl80211_get_reg_dump()
5780 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_trigger_scan() local
5791 wiphy = &rdev->wiphy; nl80211_trigger_scan()
5793 if (!rdev->ops->scan) nl80211_trigger_scan()
5796 if (rdev->scan_req || rdev->scan_msg) { nl80211_trigger_scan()
5980 request->wiphy = &rdev->wiphy; nl80211_trigger_scan()
5983 rdev->scan_req = request; nl80211_trigger_scan()
5984 err = rdev_scan(rdev, request); nl80211_trigger_scan()
5987 nl80211_send_scan_start(rdev, wdev); nl80211_trigger_scan()
5992 rdev->scan_req = NULL; nl80211_trigger_scan()
6413 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_start_sched_scan() local
6419 if (!(rdev->wiphy.flags & WIPHY_FLAG_SUPPORTS_SCHED_SCAN) || nl80211_start_sched_scan()
6420 !rdev->ops->sched_scan_start) nl80211_start_sched_scan()
6423 if (rdev->sched_scan_req) nl80211_start_sched_scan()
6426 sched_scan_req = nl80211_parse_sched_scan(&rdev->wiphy, wdev, nl80211_start_sched_scan()
6433 err = rdev_sched_scan_start(rdev, dev, sched_scan_req); nl80211_start_sched_scan()
6438 sched_scan_req->wiphy = &rdev->wiphy; nl80211_start_sched_scan()
6443 rcu_assign_pointer(rdev->sched_scan_req, sched_scan_req); nl80211_start_sched_scan()
6445 nl80211_send_sched_scan(rdev, dev, nl80211_start_sched_scan()
6458 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_stop_sched_scan() local
6460 if (!(rdev->wiphy.flags & WIPHY_FLAG_SUPPORTS_SCHED_SCAN) || nl80211_stop_sched_scan()
6461 !rdev->ops->sched_scan_stop) nl80211_stop_sched_scan()
6464 return __cfg80211_stop_sched_scan(rdev, false); nl80211_stop_sched_scan()
6470 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_start_radar_detection() local
6482 err = nl80211_parse_chandef(rdev, info, &chandef); nl80211_start_radar_detection()
6503 if (!rdev->ops->start_radar_detection) nl80211_start_radar_detection()
6506 cac_time_ms = cfg80211_chandef_dfs_cac_time(&rdev->wiphy, &chandef); nl80211_start_radar_detection()
6510 err = rdev->ops->start_radar_detection(&rdev->wiphy, dev, &chandef, nl80211_start_radar_detection()
6523 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_channel_switch() local
6536 if (!rdev->ops->channel_switch || nl80211_channel_switch()
6537 !(rdev->wiphy.flags & WIPHY_FLAG_HAS_CHANNEL_SWITCH)) nl80211_channel_switch()
6605 if (rdev->wiphy.max_num_csa_counters && nl80211_channel_switch()
6607 rdev->wiphy.max_num_csa_counters)) nl80211_channel_switch()
6630 if (rdev->wiphy.max_num_csa_counters && nl80211_channel_switch()
6632 rdev->wiphy.max_num_csa_counters)) nl80211_channel_switch()
6652 err = nl80211_parse_chandef(rdev, info, &params.chandef); nl80211_channel_switch()
6656 if (!cfg80211_reg_can_beacon_relax(&rdev->wiphy, &params.chandef, nl80211_channel_switch()
6673 err = rdev_channel_switch(rdev, dev, &params); nl80211_channel_switch()
6681 struct cfg80211_registered_device *rdev, nl80211_send_bss()
6699 if (nla_put_u32(msg, NL80211_ATTR_GENERATION, rdev->bss_generation)) nl80211_send_bss()
6758 switch (rdev->wiphy.signal_type) { nl80211_send_bss()
6803 struct cfg80211_registered_device *rdev; nl80211_dump_scan() local
6809 err = nl80211_prepare_wdev_dump(skb, cb, &rdev, &wdev); nl80211_dump_scan()
6814 spin_lock_bh(&rdev->bss_lock); nl80211_dump_scan()
6815 cfg80211_bss_expire(rdev); nl80211_dump_scan()
6817 cb->seq = rdev->bss_generation; nl80211_dump_scan()
6819 list_for_each_entry(scan, &rdev->bss_list, list) { nl80211_dump_scan()
6824 rdev, wdev, scan) < 0) { nl80211_dump_scan()
6830 spin_unlock_bh(&rdev->bss_lock); nl80211_dump_scan()
6834 nl80211_finish_wdev_dump(rdev); nl80211_dump_scan()
6912 struct cfg80211_registered_device *rdev; nl80211_dump_survey() local
6918 res = nl80211_prepare_wdev_dump(skb, cb, &rdev, &wdev); nl80211_dump_survey()
6930 if (!rdev->ops->dump_survey) { nl80211_dump_survey()
6936 res = rdev_dump_survey(rdev, wdev->netdev, survey_idx, &survey); nl80211_dump_survey()
6961 nl80211_finish_wdev_dump(rdev); nl80211_dump_survey()
6973 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_authenticate() local
7021 for (i = 0; i < rdev->wiphy.n_cipher_suites; i++) { nl80211_authenticate()
7022 if (key.p.cipher == rdev->wiphy.cipher_suites[i]) { nl80211_authenticate()
7031 if (!rdev->ops->auth) nl80211_authenticate()
7039 chan = nl80211_get_valid_chan(&rdev->wiphy, nl80211_authenticate()
7053 if (!nl80211_valid_auth_type(rdev, auth_type, NL80211_CMD_AUTHENTICATE)) nl80211_authenticate()
7080 err = cfg80211_mlme_auth(rdev, dev, chan, auth_type, bssid, nl80211_authenticate()
7088 static int nl80211_crypto_settings(struct cfg80211_registered_device *rdev, nl80211_crypto_settings() argument
7102 if (!(rdev->wiphy.flags & WIPHY_FLAG_CONTROL_PORT_PROTOCOL) && nl80211_crypto_settings()
7128 &rdev->wiphy, nl80211_crypto_settings()
7136 if (!cfg80211_supported_cipher_suite(&rdev->wiphy, nl80211_crypto_settings()
7170 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_associate() local
7185 if (!rdev->ops->assoc) nl80211_associate()
7194 chan = nl80211_get_valid_chan(&rdev->wiphy, nl80211_associate()
7252 if (!(rdev->wiphy.features & nl80211_associate()
7254 !(rdev->wiphy.features & NL80211_FEATURE_QUIET)) nl80211_associate()
7259 err = nl80211_crypto_settings(rdev, info, &req.crypto, 1); nl80211_associate()
7262 err = cfg80211_mlme_assoc(rdev, dev, chan, bssid, nl80211_associate()
7272 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_deauthenticate() local
7288 if (!rdev->ops->deauth) nl80211_deauthenticate()
7311 err = cfg80211_mlme_deauth(rdev, dev, bssid, ie, ie_len, reason_code, nl80211_deauthenticate()
7319 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_disassociate() local
7335 if (!rdev->ops->disassoc) nl80211_disassociate()
7358 err = cfg80211_mlme_disassoc(rdev, dev, bssid, ie, ie_len, reason_code, nl80211_disassociate()
7365 nl80211_parse_mcast_rate(struct cfg80211_registered_device *rdev, nl80211_parse_mcast_rate() argument
7369 struct wiphy *wiphy = &rdev->wiphy; nl80211_parse_mcast_rate()
7394 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_join_ibss() local
7419 if (!rdev->ops->join_ibss) nl80211_join_ibss()
7425 wiphy = &rdev->wiphy; nl80211_join_ibss()
7441 err = nl80211_parse_chandef(rdev, info, &ibss.chandef); nl80211_join_ibss()
7445 if (!cfg80211_reg_can_beacon(&rdev->wiphy, &ibss.chandef, nl80211_join_ibss()
7456 if (!(rdev->wiphy.features & NL80211_FEATURE_HT_IBSS)) nl80211_join_ibss()
7462 if (!(rdev->wiphy.features & NL80211_FEATURE_HT_IBSS)) nl80211_join_ibss()
7464 if (!wiphy_ext_feature_isset(&rdev->wiphy, nl80211_join_ibss()
7503 !nl80211_parse_mcast_rate(rdev, ibss.mcast_rate, nl80211_join_ibss()
7510 connkeys = nl80211_parse_connkeys(rdev, nl80211_join_ibss()
7529 err = cfg80211_join_ibss(rdev, dev, &ibss, connkeys); nl80211_join_ibss()
7537 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_leave_ibss() local
7540 if (!rdev->ops->leave_ibss) nl80211_leave_ibss()
7546 return cfg80211_leave_ibss(rdev, dev, false); nl80211_leave_ibss()
7551 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_mcast_rate() local
7562 if (!rdev->ops->set_mcast_rate) nl80211_set_mcast_rate()
7571 if (!nl80211_parse_mcast_rate(rdev, mcast_rate, nla_rate)) nl80211_set_mcast_rate()
7574 err = rdev->ops->set_mcast_rate(&rdev->wiphy, dev, mcast_rate); nl80211_set_mcast_rate()
7580 __cfg80211_alloc_vendor_skb(struct cfg80211_registered_device *rdev, __cfg80211_alloc_vendor_skb() argument
7601 if (nla_put_u32(skb, NL80211_ATTR_WIPHY, rdev->wiphy_idx)) __cfg80211_alloc_vendor_skb()
7625 ((void **)skb->cb)[0] = rdev; __cfg80211_alloc_vendor_skb()
7643 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); __cfg80211_alloc_event_skb() local
7663 return __cfg80211_alloc_vendor_skb(rdev, wdev, approxlen, 0, 0, __cfg80211_alloc_event_skb()
7670 struct cfg80211_registered_device *rdev = ((void **)skb->cb)[0]; __cfg80211_send_event_skb() local
7684 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), skb, 0, __cfg80211_send_event_skb()
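__cfg80211_alloc_vendor_skb() parks the rdev pointer in the skb control buffer (line 7625 above) so __cfg80211_send_event_skb() can recover it later without extra bookkeeping (line 7670). A minimal sketch of that stash-and-fetch pattern, assuming a 48-byte control buffer as in struct sk_buff; fake_skb and fake_rdev are stand-ins:

struct fake_skb { char cb[48]; };		/* mirrors sk_buff's cb[] */
struct fake_rdev { int wiphy_idx; };

static void stash_rdev(struct fake_skb *skb, struct fake_rdev *rdev)
{
	/* First pointer-sized slot of cb[] carries the owning device. */
	((void **)skb->cb)[0] = rdev;
}

static struct fake_rdev *fetch_rdev(struct fake_skb *skb)
{
	return ((void **)skb->cb)[0];
}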
7692 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_testmode_do() local
7697 if (!rdev->ops->testmode_cmd) nl80211_testmode_do()
7705 } else if (wdev->wiphy != &rdev->wiphy) { nl80211_testmode_do()
7712 rdev->cur_cmd_info = info; nl80211_testmode_do()
7713 err = rdev_testmode_cmd(rdev, wdev, nl80211_testmode_do()
7716 rdev->cur_cmd_info = NULL; nl80211_testmode_do()
7724 struct cfg80211_registered_device *rdev; nl80211_testmode_dump() local
7745 rdev = __cfg80211_rdev_from_attrs(sock_net(skb->sk), nl80211_testmode_dump()
7747 if (IS_ERR(rdev)) { nl80211_testmode_dump()
7748 err = PTR_ERR(rdev); nl80211_testmode_dump()
7751 phy_idx = rdev->wiphy_idx; nl80211_testmode_dump()
7752 rdev = NULL; nl80211_testmode_dump()
7764 rdev = cfg80211_rdev_by_wiphy_idx(phy_idx); nl80211_testmode_dump()
7765 if (!rdev) { nl80211_testmode_dump()
7770 if (!rdev->ops->testmode_dump) { nl80211_testmode_dump()
7794 err = rdev_testmode_dump(rdev, skb, cb, data, data_len); nl80211_testmode_dump()
7819 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_connect() local
7838 if (!nl80211_valid_auth_type(rdev, connect.auth_type, nl80211_connect()
7846 err = nl80211_crypto_settings(rdev, info, &connect.crypto, nl80211_connect()
7855 wiphy = &rdev->wiphy; nl80211_connect()
7899 connkeys = nl80211_parse_connkeys(rdev, nl80211_connect()
7942 if (!(rdev->wiphy.features & nl80211_connect()
7944 !(rdev->wiphy.features & NL80211_FEATURE_QUIET)) { nl80211_connect()
7952 err = cfg80211_connect(rdev, dev, &connect, connkeys, NULL); nl80211_connect()
7961 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_disconnect() local
7979 ret = cfg80211_disconnect(rdev, dev, reason, true); nl80211_disconnect()
7986 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_wiphy_netns() local
8008 if (!net_eq(wiphy_net(&rdev->wiphy), net)) nl80211_wiphy_netns()
8009 err = cfg80211_switch_netns(rdev, net); nl80211_wiphy_netns()
8017 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_setdel_pmksa() local
8040 rdev_ops = rdev->ops->set_pmksa; nl80211_setdel_pmksa()
8043 rdev_ops = rdev->ops->del_pmksa; nl80211_setdel_pmksa()
8053 return rdev_ops(&rdev->wiphy, dev, &pmksa); nl80211_setdel_pmksa()
8058 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_flush_pmksa() local
8065 if (!rdev->ops->flush_pmksa) nl80211_flush_pmksa()
8068 return rdev_flush_pmksa(rdev, dev); nl80211_flush_pmksa()
8073 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_tdls_mgmt() local
8081 if (!(rdev->wiphy.flags & WIPHY_FLAG_SUPPORTS_TDLS) || nl80211_tdls_mgmt()
8082 !rdev->ops->tdls_mgmt) nl80211_tdls_mgmt()
8101 return rdev_tdls_mgmt(rdev, dev, peer, action_code, nl80211_tdls_mgmt()
8110 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_tdls_oper() local
8115 if (!(rdev->wiphy.flags & WIPHY_FLAG_SUPPORTS_TDLS) || nl80211_tdls_oper()
8116 !rdev->ops->tdls_oper) nl80211_tdls_oper()
8126 return rdev_tdls_oper(rdev, dev, peer, operation); nl80211_tdls_oper()
8132 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_remain_on_channel() local
8147 if (!rdev->ops->remain_on_channel || nl80211_remain_on_channel()
8148 !(rdev->wiphy.flags & WIPHY_FLAG_HAS_REMAIN_ON_CHANNEL)) nl80211_remain_on_channel()
8156 duration > rdev->wiphy.max_remain_on_channel_duration) nl80211_remain_on_channel()
8159 err = nl80211_parse_chandef(rdev, info, &chandef); nl80211_remain_on_channel()
8174 err = rdev_remain_on_channel(rdev, wdev, chandef.chan, nl80211_remain_on_channel()
8197 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_cancel_remain_on_channel() local
8204 if (!rdev->ops->cancel_remain_on_channel) nl80211_cancel_remain_on_channel()
8209 return rdev_cancel_remain_on_channel(rdev, wdev, cookie); nl80211_cancel_remain_on_channel()
8337 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_tx_bitrate_mask() local
8345 if (!rdev->ops->set_bitrate_mask) nl80211_set_tx_bitrate_mask()
8351 sband = rdev->wiphy.bands[i]; nl80211_set_tx_bitrate_mask()
8383 sband = rdev->wiphy.bands[band]; nl80211_set_tx_bitrate_mask()
8425 if (!(rdev->wiphy.bands[band]->ht_cap.ht_supported || nl80211_set_tx_bitrate_mask()
8426 rdev->wiphy.bands[band]->vht_cap.vht_supported)) nl80211_set_tx_bitrate_mask()
8443 return rdev_set_bitrate_mask(rdev, dev, NULL, &mask); nl80211_set_tx_bitrate_mask()
8448 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_register_mgmt() local
8473 if (!rdev->ops->mgmt_tx) nl80211_register_mgmt()
8483 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_tx_mgmt() local
8498 if (!rdev->ops->mgmt_tx) nl80211_tx_mgmt()
8518 if (!(rdev->wiphy.flags & WIPHY_FLAG_OFFCHAN_TX)) nl80211_tx_mgmt()
8527 params.wait > rdev->wiphy.max_remain_on_channel_duration) nl80211_tx_mgmt()
8534 if (params.offchan && !(rdev->wiphy.flags & WIPHY_FLAG_OFFCHAN_TX)) nl80211_tx_mgmt()
8544 err = nl80211_parse_chandef(rdev, info, &chandef); nl80211_tx_mgmt()
8587 err = cfg80211_mlme_mgmt_tx(rdev, wdev, &params, &cookie); nl80211_tx_mgmt()
8610 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_tx_mgmt_cancel_wait() local
8617 if (!rdev->ops->mgmt_tx_cancel_wait) nl80211_tx_mgmt_cancel_wait()
8635 return rdev_mgmt_tx_cancel_wait(rdev, wdev, cookie); nl80211_tx_mgmt_cancel_wait()
8640 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_power_save() local
8657 if (!rdev->ops->set_power_mgmt) nl80211_set_power_save()
8665 err = rdev_set_power_mgmt(rdev, dev, state, wdev->ps_timeout); nl80211_set_power_save()
8673 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_get_power_save() local
8683 if (!rdev->ops->set_power_mgmt) nl80211_get_power_save()
8728 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_cqm_txe() local
8735 if (!rdev->ops->set_cqm_txe_config) nl80211_set_cqm_txe()
8742 return rdev_set_cqm_txe_config(rdev, dev, rate, pkts, intvl); nl80211_set_cqm_txe()
8748 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_cqm_rssi() local
8759 if (!rdev->ops->set_cqm_rssi_config) nl80211_set_cqm_rssi()
8766 return rdev_set_cqm_rssi_config(rdev, dev, threshold, hysteresis); nl80211_set_cqm_rssi()
8807 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_join_ocb() local
8812 err = nl80211_parse_chandef(rdev, info, &setup.chandef); nl80211_join_ocb()
8816 return cfg80211_join_ocb(rdev, dev, &setup); nl80211_join_ocb()
8821 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_leave_ocb() local
8824 return cfg80211_leave_ocb(rdev, dev); nl80211_leave_ocb()
8829 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_join_mesh() local
8854 !nl80211_parse_mcast_rate(rdev, setup.mcast_rate, nl80211_join_mesh()
8884 err = nl80211_parse_chandef(rdev, info, &setup.chandef); nl80211_join_mesh()
8901 sband = rdev->wiphy.bands[setup.chandef.chan->band]; nl80211_join_mesh()
8909 return cfg80211_join_mesh(rdev, dev, &setup, &cfg); nl80211_join_mesh()
8914 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_leave_mesh() local
8917 return cfg80211_leave_mesh(rdev, dev); nl80211_leave_mesh()
8922 struct cfg80211_registered_device *rdev) nl80211_send_wowlan_patterns()
8924 struct cfg80211_wowlan *wowlan = rdev->wiphy.wowlan_config; nl80211_send_wowlan_patterns()
9063 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_get_wowlan() local
9068 if (!rdev->wiphy.wowlan) nl80211_get_wowlan()
9071 if (rdev->wiphy.wowlan_config && rdev->wiphy.wowlan_config->tcp) { nl80211_get_wowlan()
9073 size += rdev->wiphy.wowlan_config->tcp->tokens_size + nl80211_get_wowlan()
9074 rdev->wiphy.wowlan_config->tcp->payload_len + nl80211_get_wowlan()
9075 rdev->wiphy.wowlan_config->tcp->wake_len + nl80211_get_wowlan()
9076 rdev->wiphy.wowlan_config->tcp->wake_len / 8; nl80211_get_wowlan()
9088 if (rdev->wiphy.wowlan_config) { nl80211_get_wowlan()
9095 if ((rdev->wiphy.wowlan_config->any && nl80211_get_wowlan()
9097 (rdev->wiphy.wowlan_config->disconnect && nl80211_get_wowlan()
9099 (rdev->wiphy.wowlan_config->magic_pkt && nl80211_get_wowlan()
9101 (rdev->wiphy.wowlan_config->gtk_rekey_failure && nl80211_get_wowlan()
9103 (rdev->wiphy.wowlan_config->eap_identity_req && nl80211_get_wowlan()
9105 (rdev->wiphy.wowlan_config->four_way_handshake && nl80211_get_wowlan()
9107 (rdev->wiphy.wowlan_config->rfkill_release && nl80211_get_wowlan()
9111 if (nl80211_send_wowlan_patterns(msg, rdev)) nl80211_get_wowlan()
9115 rdev->wiphy.wowlan_config->tcp)) nl80211_get_wowlan()
9120 rdev->wiphy.wowlan_config->nd_config)) nl80211_get_wowlan()
9134 static int nl80211_parse_wowlan_tcp(struct cfg80211_registered_device *rdev, nl80211_parse_wowlan_tcp() argument
9146 if (!rdev->wiphy.wowlan->tcp) nl80211_parse_wowlan_tcp()
9166 if (data_size > rdev->wiphy.wowlan->tcp->data_payload_max) nl80211_parse_wowlan_tcp()
9170 rdev->wiphy.wowlan->tcp->data_interval_max || nl80211_parse_wowlan_tcp()
9175 if (wake_size > rdev->wiphy.wowlan->tcp->wake_payload_max) nl80211_parse_wowlan_tcp()
9190 if (!rdev->wiphy.wowlan->tcp->tok) nl80211_parse_wowlan_tcp()
9192 if (tok->len > rdev->wiphy.wowlan->tcp->tok->max_len) nl80211_parse_wowlan_tcp()
9194 if (tok->len < rdev->wiphy.wowlan->tcp->tok->min_len) nl80211_parse_wowlan_tcp()
9196 if (tokens_size > rdev->wiphy.wowlan->tcp->tok->bufsize) nl80211_parse_wowlan_tcp()
9204 if (!rdev->wiphy.wowlan->tcp->seq) nl80211_parse_wowlan_tcp()
9230 err = __sock_create(wiphy_net(&rdev->wiphy), PF_INET, SOCK_STREAM, nl80211_parse_wowlan_tcp()
9279 static int nl80211_parse_wowlan_nd(struct cfg80211_registered_device *rdev, nl80211_parse_wowlan_nd() argument
9302 trig->nd_config = nl80211_parse_sched_scan(&rdev->wiphy, NULL, tb); nl80211_parse_wowlan_nd()
9314 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_wowlan() local
9318 const struct wiphy_wowlan_support *wowlan = rdev->wiphy.wowlan; nl80211_set_wowlan()
9320 bool prev_enabled = rdev->wiphy.wowlan_config; nl80211_set_wowlan()
9327 cfg80211_rdev_free_wowlan(rdev); nl80211_set_wowlan()
9328 rdev->wiphy.wowlan_config = NULL; nl80211_set_wowlan()
9461 rdev, tb[NL80211_WOWLAN_TRIG_TCP_CONNECTION],
9470 rdev, wowlan, tb[NL80211_WOWLAN_TRIG_NET_DETECT],
9492 cfg80211_rdev_free_wowlan(rdev);
9493 rdev->wiphy.wowlan_config = ntrig;
9496 if (rdev->ops->set_wakeup &&
9497 prev_enabled != !!rdev->wiphy.wowlan_config)
9498 rdev_set_wakeup(rdev, rdev->wiphy.wowlan_config);
9514 struct cfg80211_registered_device *rdev) nl80211_send_coalesce_rules()
9520 if (!rdev->coalesce->n_rules) nl80211_send_coalesce_rules()
9527 for (i = 0; i < rdev->coalesce->n_rules; i++) { nl80211_send_coalesce_rules()
9532 rule = &rdev->coalesce->rules[i]; nl80211_send_coalesce_rules()
9571 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_get_coalesce() local
9575 if (!rdev->wiphy.coalesce) nl80211_get_coalesce()
9587 if (rdev->coalesce && nl80211_send_coalesce_rules(msg, rdev)) nl80211_get_coalesce()
9598 void cfg80211_rdev_free_coalesce(struct cfg80211_registered_device *rdev) cfg80211_rdev_free_coalesce() argument
9600 struct cfg80211_coalesce *coalesce = rdev->coalesce; cfg80211_rdev_free_coalesce()
9615 rdev->coalesce = NULL; cfg80211_rdev_free_coalesce()
9618 static int nl80211_parse_coalesce_rule(struct cfg80211_registered_device *rdev, nl80211_parse_coalesce_rule() argument
9623 const struct wiphy_coalesce_support *coalesce = rdev->wiphy.coalesce; nl80211_parse_coalesce_rule()
9709 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_coalesce() local
9710 const struct wiphy_coalesce_support *coalesce = rdev->wiphy.coalesce; nl80211_set_coalesce()
9717 if (!rdev->wiphy.coalesce || !rdev->ops->set_coalesce) nl80211_set_coalesce()
9721 cfg80211_rdev_free_coalesce(rdev); nl80211_set_coalesce()
9722 rdev->ops->set_coalesce(&rdev->wiphy, NULL); nl80211_set_coalesce()
9742 err = nl80211_parse_coalesce_rule(rdev, rule, nl80211_set_coalesce()
9750 err = rdev->ops->set_coalesce(&rdev->wiphy, &new_coalesce); nl80211_set_coalesce()
9759 cfg80211_rdev_free_coalesce(rdev); nl80211_set_coalesce()
9760 rdev->coalesce = n_coalesce; nl80211_set_coalesce()
9777 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_rekey_data() local
9811 if (!rdev->ops->set_rekey_data) { nl80211_set_rekey_data()
9816 err = rdev_set_rekey_data(rdev, dev, &rekey_data); nl80211_set_rekey_data()
9842 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_probe_client() local
9858 if (!rdev->ops->probe_client) nl80211_probe_client()
9874 err = rdev_probe_client(rdev, dev, addr, &cookie); nl80211_probe_client()
9894 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_register_beacons() local
9898 if (!(rdev->wiphy.flags & WIPHY_FLAG_REPORTS_OBSS)) nl80211_register_beacons()
9906 spin_lock_bh(&rdev->beacon_registrations_lock); nl80211_register_beacons()
9907 list_for_each_entry(reg, &rdev->beacon_registrations, list) { nl80211_register_beacons()
9915 list_add(&nreg->list, &rdev->beacon_registrations); nl80211_register_beacons()
9917 spin_unlock_bh(&rdev->beacon_registrations_lock); nl80211_register_beacons()
9921 spin_unlock_bh(&rdev->beacon_registrations_lock); nl80211_register_beacons()
9928 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_start_p2p_device() local
9932 if (!rdev->ops->start_p2p_device) nl80211_start_p2p_device()
9941 if (rfkill_blocked(rdev->rfkill)) nl80211_start_p2p_device()
9944 err = rdev_start_p2p_device(rdev, wdev); nl80211_start_p2p_device()
9949 rdev->opencount++; nl80211_start_p2p_device()
9956 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_stop_p2p_device() local
9962 if (!rdev->ops->stop_p2p_device) nl80211_stop_p2p_device()
9965 cfg80211_stop_p2p_device(rdev, wdev); nl80211_stop_p2p_device()
9999 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_update_ft_ies() local
10003 if (!rdev->ops->update_ft_ies) nl80211_update_ft_ies()
10015 return rdev_update_ft_ies(rdev, dev, &ft_params); nl80211_update_ft_ies()
10021 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_crit_protocol_start() local
10027 if (!rdev->ops->crit_proto_start) nl80211_crit_protocol_start()
10030 if (WARN_ON(!rdev->ops->crit_proto_stop)) nl80211_crit_protocol_start()
10033 if (rdev->crit_proto_nlportid) nl80211_crit_protocol_start()
10053 ret = rdev_crit_proto_start(rdev, wdev, proto, duration); nl80211_crit_protocol_start()
10055 rdev->crit_proto_nlportid = info->snd_portid; nl80211_crit_protocol_start()
10063 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_crit_protocol_stop() local
10066 if (!rdev->ops->crit_proto_stop) nl80211_crit_protocol_stop()
10069 if (rdev->crit_proto_nlportid) { nl80211_crit_protocol_stop()
10070 rdev->crit_proto_nlportid = 0; nl80211_crit_protocol_stop()
10071 rdev_crit_proto_stop(rdev, wdev); nl80211_crit_protocol_stop()
10078 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_vendor_cmd() local
10084 if (!rdev->wiphy.vendor_commands) nl80211_vendor_cmd()
10092 } else if (wdev->wiphy != &rdev->wiphy) { nl80211_vendor_cmd()
10102 for (i = 0; i < rdev->wiphy.n_vendor_commands; i++) { nl80211_vendor_cmd()
10107 vcmd = &rdev->wiphy.vendor_commands[i]; nl80211_vendor_cmd()
10139 rdev->cur_cmd_info = info; nl80211_vendor_cmd()
10140 err = rdev->wiphy.vendor_commands[i].doit(&rdev->wiphy, wdev, nl80211_vendor_cmd()
10142 rdev->cur_cmd_info = NULL; nl80211_vendor_cmd()
10151 struct cfg80211_registered_device **rdev, nl80211_prepare_vendor_dump()
10172 *rdev = wiphy_to_rdev(wiphy); nl80211_prepare_vendor_dump()
10176 list_for_each_entry(tmp, &(*rdev)->wdev_list, list) { nl80211_prepare_vendor_dump()
10205 *rdev = __cfg80211_rdev_from_attrs(sock_net(skb->sk), nl80211_prepare_vendor_dump()
10207 if (IS_ERR(*rdev)) { nl80211_prepare_vendor_dump()
10208 err = PTR_ERR(*rdev); nl80211_prepare_vendor_dump()
10215 for (i = 0; i < (*rdev)->wiphy.n_vendor_commands; i++) { nl80211_prepare_vendor_dump()
10218 vcmd = &(*rdev)->wiphy.vendor_commands[i]; nl80211_prepare_vendor_dump()
10243 cb->args[0] = (*rdev)->wiphy_idx + 1; nl80211_prepare_vendor_dump()
10260 struct cfg80211_registered_device *rdev; nl80211_vendor_cmd_dump() local
10269 err = nl80211_prepare_vendor_dump(skb, cb, &rdev, &wdev); nl80211_vendor_cmd_dump()
10276 vcmd = &rdev->wiphy.vendor_commands[vcmd_idx]; nl80211_vendor_cmd_dump()
10302 if (nla_put_u32(skb, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_vendor_cmd_dump()
10315 err = vcmd->dumpit(&rdev->wiphy, wdev, skb, data, data_len, nl80211_vendor_cmd_dump()
10341 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); __cfg80211_alloc_reply_skb() local
10343 if (WARN_ON(!rdev->cur_cmd_info)) __cfg80211_alloc_reply_skb()
10346 return __cfg80211_alloc_vendor_skb(rdev, NULL, approxlen, __cfg80211_alloc_reply_skb()
10347 rdev->cur_cmd_info->snd_portid, __cfg80211_alloc_reply_skb()
10348 rdev->cur_cmd_info->snd_seq, __cfg80211_alloc_reply_skb()
10355 struct cfg80211_registered_device *rdev = ((void **)skb->cb)[0]; cfg80211_vendor_cmd_reply() local
10362 if (WARN_ON(!rdev->cur_cmd_info)) { cfg80211_vendor_cmd_reply()
10369 return genlmsg_reply(skb, rdev->cur_cmd_info); cfg80211_vendor_cmd_reply()
10377 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_set_qos_map() local
10383 if (!rdev->ops->set_qos_map) nl80211_set_qos_map()
10418 ret = rdev_set_qos_map(rdev, dev, qos_map); nl80211_set_qos_map()
10427 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_add_tx_ts() local
10435 if (!(rdev->wiphy.features & NL80211_FEATURE_SUPPORTS_WMM_ADMISSION)) nl80211_add_tx_ts()
10481 err = rdev_add_tx_ts(rdev, dev, tsid, peer, up, admitted_time); nl80211_add_tx_ts()
10490 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_del_tx_ts() local
10504 err = rdev_del_tx_ts(rdev, dev, tsid, peer); nl80211_del_tx_ts()
10513 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_tdls_channel_switch() local
10521 if (!rdev->ops->tdls_channel_switch || nl80211_tdls_channel_switch()
10522 !(rdev->wiphy.features & NL80211_FEATURE_TDLS_CHANNEL_SWITCH)) nl80211_tdls_channel_switch()
10537 err = nl80211_parse_chandef(rdev, info, &chandef); nl80211_tdls_channel_switch()
10552 if (!cfg80211_reg_can_beacon_relax(&rdev->wiphy, &chandef, nl80211_tdls_channel_switch()
10564 err = rdev_tdls_channel_switch(rdev, dev, addr, oper_class, &chandef); nl80211_tdls_channel_switch()
10573 struct cfg80211_registered_device *rdev = info->user_ptr[0]; nl80211_tdls_cancel_channel_switch() local
10578 if (!rdev->ops->tdls_channel_switch || nl80211_tdls_cancel_channel_switch()
10579 !rdev->ops->tdls_cancel_channel_switch || nl80211_tdls_cancel_channel_switch()
10580 !(rdev->wiphy.features & NL80211_FEATURE_TDLS_CHANNEL_SWITCH)) nl80211_tdls_cancel_channel_switch()
10597 rdev_tdls_cancel_channel_switch(rdev, dev, addr); nl80211_tdls_cancel_channel_switch()
10618 struct cfg80211_registered_device *rdev; nl80211_pre_doit() local
10627 rdev = cfg80211_get_dev_from_info(genl_info_net(info), info); nl80211_pre_doit()
10628 if (IS_ERR(rdev)) { nl80211_pre_doit()
10631 return PTR_ERR(rdev); nl80211_pre_doit()
10633 info->user_ptr[0] = rdev; nl80211_pre_doit()
10647 rdev = wiphy_to_rdev(wdev->wiphy); nl80211_pre_doit()
10678 info->user_ptr[0] = rdev; nl80211_pre_doit()
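nl80211_pre_doit() is why nearly every handler in this listing opens with "rdev = info->user_ptr[0]": the generic-netlink pre-doit hook resolves the device once and parks the pointer for the doit callback. A simplified sketch of that handoff (fake_info and fake_rdev are illustrative stand-ins, not the genl structures):

struct fake_rdev { int wiphy_idx; };
struct fake_info { void *user_ptr[2]; };	/* like genl_info */

static int fake_pre_doit(struct fake_info *info, struct fake_rdev *rdev)
{
	info->user_ptr[0] = rdev;	/* every handler reads this back */
	return 0;
}

static int fake_doit(struct fake_info *info)
{
	struct fake_rdev *rdev = info->user_ptr[0];

	return rdev->wiphy_idx < 0 ? -1 : 0;
}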
11425 void nl80211_notify_wiphy(struct cfg80211_registered_device *rdev, nl80211_notify_wiphy() argument
11438 if (nl80211_send_wiphy(rdev, cmd, msg, 0, 0, 0, &state) < 0) { nl80211_notify_wiphy()
11443 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_notify_wiphy()
11448 struct cfg80211_registered_device *rdev) nl80211_add_scan_req()
11450 struct cfg80211_scan_request *req = rdev->scan_req; nl80211_add_scan_req()
11489 struct cfg80211_registered_device *rdev, nl80211_send_scan_msg()
11500 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_send_scan_msg()
11507 nl80211_add_scan_req(msg, rdev); nl80211_send_scan_msg()
11519 struct cfg80211_registered_device *rdev, nl80211_send_sched_scan_msg()
11529 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_send_sched_scan_msg()
11541 void nl80211_send_scan_start(struct cfg80211_registered_device *rdev, nl80211_send_scan_start() argument
11550 if (nl80211_send_scan_msg(msg, rdev, wdev, 0, 0, 0, nl80211_send_scan_start()
11556 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_send_scan_start()
11560 struct sk_buff *nl80211_build_scan_msg(struct cfg80211_registered_device *rdev, nl80211_build_scan_msg() argument
11569 if (nl80211_send_scan_msg(msg, rdev, wdev, 0, 0, 0, nl80211_build_scan_msg()
11579 void nl80211_send_scan_result(struct cfg80211_registered_device *rdev, nl80211_send_scan_result() argument
11585 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_send_scan_result()
11589 void nl80211_send_sched_scan_results(struct cfg80211_registered_device *rdev, nl80211_send_sched_scan_results() argument
11598 if (nl80211_send_sched_scan_msg(msg, rdev, netdev, 0, 0, 0, nl80211_send_sched_scan_results()
11604 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_send_sched_scan_results()
11608 void nl80211_send_sched_scan(struct cfg80211_registered_device *rdev, nl80211_send_sched_scan() argument
11617 if (nl80211_send_sched_scan_msg(msg, rdev, netdev, 0, 0, 0, cmd) < 0) { nl80211_send_sched_scan()
11622 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_send_sched_scan()
11710 static void nl80211_send_mlme_event(struct cfg80211_registered_device *rdev, nl80211_send_mlme_event() argument
11729 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_send_mlme_event()
11749 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_send_mlme_event()
11758 void nl80211_send_rx_auth(struct cfg80211_registered_device *rdev, nl80211_send_rx_auth() argument
11762 nl80211_send_mlme_event(rdev, netdev, buf, len, nl80211_send_rx_auth()
11766 void nl80211_send_rx_assoc(struct cfg80211_registered_device *rdev, nl80211_send_rx_assoc() argument
11770 nl80211_send_mlme_event(rdev, netdev, buf, len, nl80211_send_rx_assoc()
11774 void nl80211_send_deauth(struct cfg80211_registered_device *rdev, nl80211_send_deauth() argument
11778 nl80211_send_mlme_event(rdev, netdev, buf, len, nl80211_send_deauth()
11782 void nl80211_send_disassoc(struct cfg80211_registered_device *rdev, nl80211_send_disassoc() argument
11786 nl80211_send_mlme_event(rdev, netdev, buf, len, nl80211_send_disassoc()
11795 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_rx_unprot_mlme_mgmt() local
11808 nl80211_send_mlme_event(rdev, dev, buf, len, cmd, GFP_ATOMIC, -1); cfg80211_rx_unprot_mlme_mgmt()
11812 static void nl80211_send_mlme_timeout(struct cfg80211_registered_device *rdev, nl80211_send_mlme_timeout() argument
11829 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_send_mlme_timeout()
11837 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_send_mlme_timeout()
11846 void nl80211_send_auth_timeout(struct cfg80211_registered_device *rdev, nl80211_send_auth_timeout() argument
11850 nl80211_send_mlme_timeout(rdev, netdev, NL80211_CMD_AUTHENTICATE, nl80211_send_auth_timeout()
11854 void nl80211_send_assoc_timeout(struct cfg80211_registered_device *rdev, nl80211_send_assoc_timeout() argument
11858 nl80211_send_mlme_timeout(rdev, netdev, NL80211_CMD_ASSOCIATE, nl80211_send_assoc_timeout()
11862 void nl80211_send_connect_result(struct cfg80211_registered_device *rdev, nl80211_send_connect_result() argument
11881 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_send_connect_result()
11893 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_send_connect_result()
11903 void nl80211_send_roamed(struct cfg80211_registered_device *rdev, nl80211_send_roamed() argument
11921 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_send_roamed()
11932 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_send_roamed()
11942 void nl80211_send_disconnected(struct cfg80211_registered_device *rdev, nl80211_send_disconnected() argument
11959 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_send_disconnected()
11970 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_send_disconnected()
11980 void nl80211_send_ibss_bssid(struct cfg80211_registered_device *rdev, nl80211_send_ibss_bssid() argument
11997 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_send_ibss_bssid()
12004 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_send_ibss_bssid()
12017 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_notify_new_peer_candidate() local
12036 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || cfg80211_notify_new_peer_candidate()
12045 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, cfg80211_notify_new_peer_candidate()
12055 void nl80211_michael_mic_failure(struct cfg80211_registered_device *rdev, nl80211_michael_mic_failure() argument
12073 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_michael_mic_failure()
12084 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_michael_mic_failure()
12149 int cmd, struct cfg80211_registered_device *rdev, nl80211_send_remain_on_chan_event()
12167 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_send_remain_on_chan_event()
12183 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_send_remain_on_chan_event()
12197 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_ready_on_channel() local
12201 rdev, wdev, cookie, chan, cfg80211_ready_on_channel()
12211 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_remain_on_channel_expired() local
12215 rdev, wdev, cookie, chan, 0, gfp); cfg80211_remain_on_channel_expired()
12223 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_new_sta() local
12233 rdev, dev, mac_addr, sinfo) < 0) { cfg80211_new_sta()
12238 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, cfg80211_new_sta()
12247 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_del_sta_sinfo() local
12261 rdev, dev, mac_addr, sinfo) < 0) { cfg80211_del_sta_sinfo()
12266 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, cfg80211_del_sta_sinfo()
12276 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_conn_failed() local
12297 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, cfg80211_conn_failed()
12311 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); __nl80211_unexpected_frame() local
12329 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || __nl80211_unexpected_frame()
12335 genlmsg_unicast(wiphy_net(&rdev->wiphy), msg, nlportid); __nl80211_unexpected_frame()
12386 int nl80211_send_mgmt(struct cfg80211_registered_device *rdev, nl80211_send_mgmt() argument
12405 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_send_mgmt()
12419 return genlmsg_unicast(wiphy_net(&rdev->wiphy), msg, nlportid); nl80211_send_mgmt()
12431 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_mgmt_tx_status() local
12448 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || cfg80211_mgmt_tx_status()
12459 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, cfg80211_mgmt_tx_status()
12473 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_prepare_cqm() local
12488 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || cfg80211_prepare_cqm()
12499 cb[2] = rdev; cfg80211_prepare_cqm()
12510 struct cfg80211_registered_device *rdev = cb[2]; cfg80211_send_cqm() local
12517 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, cfg80211_send_cqm()
12618 static void nl80211_gtk_rekey_notify(struct cfg80211_registered_device *rdev, nl80211_gtk_rekey_notify() argument
12636 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_gtk_rekey_notify()
12653 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_gtk_rekey_notify()
12667 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_gtk_rekey_notify() local
12670 nl80211_gtk_rekey_notify(rdev, dev, bssid, replay_ctr, gfp); cfg80211_gtk_rekey_notify()
12675 nl80211_pmksa_candidate_notify(struct cfg80211_registered_device *rdev, nl80211_pmksa_candidate_notify() argument
12693 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_pmksa_candidate_notify()
12711 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_pmksa_candidate_notify()
12725 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_pmksa_candidate_notify() local
12728 nl80211_pmksa_candidate_notify(rdev, dev, index, bssid, preauth, gfp); cfg80211_pmksa_candidate_notify()
12732 static void nl80211_ch_switch_notify(struct cfg80211_registered_device *rdev, nl80211_ch_switch_notify() argument
12764 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_ch_switch_notify()
12778 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_ch_switch_notify() local
12786 nl80211_ch_switch_notify(rdev, dev, chandef, GFP_KERNEL, cfg80211_ch_switch_notify()
12797 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_ch_switch_started_notify() local
12801 nl80211_ch_switch_notify(rdev, dev, chandef, GFP_KERNEL, cfg80211_ch_switch_started_notify()
12807 nl80211_radar_notify(struct cfg80211_registered_device *rdev, nl80211_radar_notify() argument
12825 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx)) nl80211_radar_notify()
12845 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, nl80211_radar_notify()
12858 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_probe_status() local
12875 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || cfg80211_probe_status()
12884 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, cfg80211_probe_status()
12898 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_report_obss_beacon() local
12905 spin_lock_bh(&rdev->beacon_registrations_lock); cfg80211_report_obss_beacon()
12906 list_for_each_entry(reg, &rdev->beacon_registrations, list) { cfg80211_report_obss_beacon()
12909 spin_unlock_bh(&rdev->beacon_registrations_lock); cfg80211_report_obss_beacon()
12917 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || cfg80211_report_obss_beacon()
12927 genlmsg_unicast(wiphy_net(&rdev->wiphy), msg, reg->nlportid); cfg80211_report_obss_beacon()
12929 spin_unlock_bh(&rdev->beacon_registrations_lock); cfg80211_report_obss_beacon()
12933 spin_unlock_bh(&rdev->beacon_registrations_lock); cfg80211_report_obss_beacon()
13004 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_report_wowlan_wakeup() local
13022 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || cfg80211_report_wowlan_wakeup()
13103 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, cfg80211_report_wowlan_wakeup()
13118 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_tdls_oper_request() local
13135 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || cfg80211_tdls_oper_request()
13145 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, cfg80211_tdls_oper_request()
13160 struct cfg80211_registered_device *rdev; nl80211_netlink_notify() local
13169 list_for_each_entry_rcu(rdev, &cfg80211_rdev_list, list) { nl80211_netlink_notify()
13173 rcu_dereference(rdev->sched_scan_req); nl80211_netlink_notify()
13179 list_for_each_entry_rcu(wdev, &rdev->wdev_list, list) { nl80211_netlink_notify()
13186 spin_lock_bh(&rdev->beacon_registrations_lock); nl80211_netlink_notify()
13187 list_for_each_entry_safe(reg, tmp, &rdev->beacon_registrations, nl80211_netlink_notify()
13195 spin_unlock_bh(&rdev->beacon_registrations_lock); nl80211_netlink_notify()
13203 spin_lock(&rdev->destroy_list_lock); nl80211_netlink_notify()
13204 list_add(&destroy->list, &rdev->destroy_list); nl80211_netlink_notify()
13205 spin_unlock(&rdev->destroy_list_lock); nl80211_netlink_notify()
13206 schedule_work(&rdev->destroy_work); nl80211_netlink_notify()
13211 if (rdev->ops->sched_scan_stop && nl80211_netlink_notify()
13212 rdev->wiphy.flags & WIPHY_FLAG_SUPPORTS_SCHED_SCAN) nl80211_netlink_notify()
13213 schedule_work(&rdev->sched_scan_stop_wk); nl80211_netlink_notify()
13235 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_ft_event() local
13252 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || cfg80211_ft_event()
13267 genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy), msg, 0, cfg80211_ft_event()
13277 struct cfg80211_registered_device *rdev; cfg80211_crit_proto_stopped() local
13282 rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_crit_proto_stopped()
13283 if (!rdev->crit_proto_nlportid) cfg80211_crit_proto_stopped()
13286 nlportid = rdev->crit_proto_nlportid; cfg80211_crit_proto_stopped()
13287 rdev->crit_proto_nlportid = 0; cfg80211_crit_proto_stopped()
13297 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || cfg80211_crit_proto_stopped()
13303 genlmsg_unicast(wiphy_net(&rdev->wiphy), msg, nlportid); cfg80211_crit_proto_stopped()
13317 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); nl80211_send_ap_stopped() local
13329 if (nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx) || nl80211_send_ap_stopped()
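The notification helpers above all share one shape: allocate a genetlink message, put NL80211_ATTR_WIPHY plus event-specific attributes, finalize, and multicast into the wiphy's network namespace. A trimmed sketch of that shape (command choice and sizing are illustrative; error handling is reduced to freeing the message):

static void my_notify(struct cfg80211_registered_device *rdev, u8 cmd)
{
	struct sk_buff *msg;
	void *hdr;

	msg = genlmsg_new(NLMSG_DEFAULT_SIZE, GFP_KERNEL);
	if (!msg)
		return;

	hdr = genlmsg_put(msg, 0, 0, &nl80211_fam, 0, cmd);
	if (!hdr || nla_put_u32(msg, NL80211_ATTR_WIPHY, rdev->wiphy_idx)) {
		nlmsg_free(msg);
		return;
	}

	genlmsg_end(msg, hdr);
	genlmsg_multicast_netns(&nl80211_fam, wiphy_net(&rdev->wiphy),
				msg, 0, NL80211_MCGRP_MLME, GFP_KERNEL);
}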
488 nl80211_prepare_wdev_dump(struct sk_buff *skb, struct netlink_callback *cb, struct cfg80211_registered_device **rdev, struct wireless_dev **wdev) nl80211_prepare_wdev_dump() argument
1057 nl80211_send_wowlan(struct sk_buff *msg, struct cfg80211_registered_device *rdev, bool large) nl80211_send_wowlan() argument
1115 nl80211_send_coalesce(struct sk_buff *msg, struct cfg80211_registered_device *rdev) nl80211_send_coalesce() argument
2379 nl80211_send_iface(struct sk_buff *msg, u32 portid, u32 seq, int flags, struct cfg80211_registered_device *rdev, struct wireless_dev *wdev, bool removal) nl80211_send_iface() argument
3699 nl80211_send_station(struct sk_buff *msg, u32 cmd, u32 portid, u32 seq, int flags, struct cfg80211_registered_device *rdev, struct net_device *dev, const u8 *mac_addr, struct station_info *sinfo) nl80211_send_station() argument
4108 get_vlan(struct genl_info *info, struct cfg80211_registered_device *rdev) get_vlan() argument
6679 nl80211_send_bss(struct sk_buff *msg, struct netlink_callback *cb, u32 seq, int flags, struct cfg80211_registered_device *rdev, struct wireless_dev *wdev, struct cfg80211_internal_bss *intbss) nl80211_send_bss() argument
8921 nl80211_send_wowlan_patterns(struct sk_buff *msg, struct cfg80211_registered_device *rdev) nl80211_send_wowlan_patterns() argument
9513 nl80211_send_coalesce_rules(struct sk_buff *msg, struct cfg80211_registered_device *rdev) nl80211_send_coalesce_rules() argument
10149 nl80211_prepare_vendor_dump(struct sk_buff *skb, struct netlink_callback *cb, struct cfg80211_registered_device **rdev, struct wireless_dev **wdev) nl80211_prepare_vendor_dump() argument
11447 nl80211_add_scan_req(struct sk_buff *msg, struct cfg80211_registered_device *rdev) nl80211_add_scan_req() argument
11488 nl80211_send_scan_msg(struct sk_buff *msg, struct cfg80211_registered_device *rdev, struct wireless_dev *wdev, u32 portid, u32 seq, int flags, u32 cmd) nl80211_send_scan_msg() argument
11518 nl80211_send_sched_scan_msg(struct sk_buff *msg, struct cfg80211_registered_device *rdev, struct net_device *netdev, u32 portid, u32 seq, int flags, u32 cmd) nl80211_send_sched_scan_msg() argument
12148 nl80211_send_remain_on_chan_event( int cmd, struct cfg80211_registered_device *rdev, struct wireless_dev *wdev, u64 cookie, struct ieee80211_channel *chan, unsigned int duration, gfp_t gfp) nl80211_send_remain_on_chan_event() argument
H A D wext-compat.c 22 #include "rdev-ops.h"
72 struct cfg80211_registered_device *rdev; cfg80211_wext_siwmode() local
76 rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_wext_siwmode()
100 return cfg80211_change_iface(rdev, dev, type, NULL, &vifparams); cfg80211_wext_siwmode()
289 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_wext_siwrts() local
300 err = rdev_set_wiphy_params(rdev, WIPHY_PARAM_RTS_THRESHOLD); cfg80211_wext_siwrts()
327 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_wext_siwfrag() local
340 err = rdev_set_wiphy_params(rdev, WIPHY_PARAM_FRAG_THRESHOLD); cfg80211_wext_siwfrag()
367 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_wext_siwretry() local
393 err = rdev_set_wiphy_params(rdev, changed); cfg80211_wext_siwretry()
432 static int __cfg80211_set_encryption(struct cfg80211_registered_device *rdev, __cfg80211_set_encryption() argument
462 if (!rdev->ops->set_default_mgmt_key) __cfg80211_set_encryption()
479 __cfg80211_leave_ibss(rdev, wdev->netdev, true); __cfg80211_set_encryption()
484 !(rdev->wiphy.flags & WIPHY_FLAG_IBSS_RSN)) __cfg80211_set_encryption()
487 err = rdev_del_key(rdev, dev, idx, pairwise, __cfg80211_set_encryption()
511 err = cfg80211_ibss_wext_join(rdev, wdev); __cfg80211_set_encryption()
519 if (cfg80211_validate_key_settings(rdev, params, idx, pairwise, addr)) __cfg80211_set_encryption()
524 err = rdev_add_key(rdev, dev, idx, pairwise, addr, params); __cfg80211_set_encryption()
547 __cfg80211_leave_ibss(rdev, wdev->netdev, true); __cfg80211_set_encryption()
550 err = rdev_set_default_key(rdev, dev, idx, true, true); __cfg80211_set_encryption()
555 err = cfg80211_ibss_wext_join(rdev, wdev); __cfg80211_set_encryption()
563 err = rdev_set_default_mgmt_key(rdev, dev, idx); __cfg80211_set_encryption()
572 static int cfg80211_set_encryption(struct cfg80211_registered_device *rdev, cfg80211_set_encryption() argument
580 err = __cfg80211_set_encryption(rdev, dev, pairwise, addr, cfg80211_set_encryption()
592 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_wext_siwencode() local
602 if (!rdev->ops->del_key || cfg80211_wext_siwencode()
603 !rdev->ops->add_key || cfg80211_wext_siwencode()
604 !rdev->ops->set_default_key) cfg80211_wext_siwencode()
624 err = rdev_set_default_key(rdev, dev, idx, true, cfg80211_wext_siwencode()
642 return cfg80211_set_encryption(rdev, dev, false, NULL, remove, cfg80211_wext_siwencode()
652 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_wext_siwencodeext() local
665 if (!rdev->ops->del_key || cfg80211_wext_siwencodeext()
666 !rdev->ops->add_key || cfg80211_wext_siwencodeext()
667 !rdev->ops->set_default_key) cfg80211_wext_siwencodeext()
731 rdev, dev, cfg80211_wext_siwencodeext()
780 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_wext_siwfreq() local
798 chandef.chan = ieee80211_get_channel(&rdev->wiphy, freq); cfg80211_wext_siwfreq()
801 return cfg80211_set_monitor_channel(rdev, &chandef); cfg80211_wext_siwfreq()
809 chandef.chan = ieee80211_get_channel(&rdev->wiphy, freq); cfg80211_wext_siwfreq()
812 return cfg80211_set_mesh_channel(rdev, wdev, &chandef); cfg80211_wext_siwfreq()
823 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_wext_giwfreq() local
833 if (!rdev->ops->get_channel) cfg80211_wext_giwfreq()
836 ret = rdev_get_channel(rdev, wdev, &chandef); cfg80211_wext_giwfreq()
852 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_wext_siwtxpower() local
861 if (!rdev->ops->set_tx_power) cfg80211_wext_siwtxpower()
866 rfkill_set_sw_state(rdev->rfkill, false); cfg80211_wext_siwtxpower()
891 rfkill_set_sw_state(rdev->rfkill, true); cfg80211_wext_siwtxpower()
892 schedule_work(&rdev->rfkill_sync); cfg80211_wext_siwtxpower()
896 return rdev_set_tx_power(rdev, wdev, type, DBM_TO_MBM(dbm)); cfg80211_wext_siwtxpower()
904 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_wext_giwtxpower() local
912 if (!rdev->ops->get_tx_power) cfg80211_wext_giwtxpower()
915 err = rdev_get_tx_power(rdev, wdev, &val); cfg80211_wext_giwtxpower()
921 data->txpower.disabled = rfkill_blocked(rdev->rfkill); cfg80211_wext_giwtxpower()
1124 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_wext_siwpower() local
1132 if (!rdev->ops->set_power_mgmt) cfg80211_wext_siwpower()
1155 err = rdev_set_power_mgmt(rdev, dev, ps, timeout); cfg80211_wext_siwpower()
1182 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_wds_wext_siwap() local
1194 if (!rdev->ops->set_wds_peer) cfg80211_wds_wext_siwap()
1197 err = rdev_set_wds_peer(rdev, dev, (u8 *)&addr->sa_data); cfg80211_wds_wext_siwap()
1226 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_wext_siwrate() local
1233 if (!rdev->ops->set_bitrate_mask) cfg80211_wext_siwrate()
1269 return rdev_set_bitrate_mask(rdev, dev, NULL, &mask); cfg80211_wext_siwrate()
1277 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_wext_giwrate() local
1286 if (!rdev->ops->get_station) cfg80211_wext_giwrate()
1299 err = rdev_get_station(rdev, dev, addr, &sinfo); cfg80211_wext_giwrate()
1315 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_wireless_stats() local
1324 if (!rdev->ops->get_station) cfg80211_wireless_stats()
1338 if (rdev_get_station(rdev, dev, bssid, &sinfo)) cfg80211_wireless_stats()
1343 switch (rdev->wiphy.signal_type) { cfg80211_wireless_stats()
1456 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_wext_siwpmksa() local
1470 if (!rdev->ops->set_pmksa) cfg80211_wext_siwpmksa()
1473 return rdev_set_pmksa(rdev, dev, &cfg_pmksa); cfg80211_wext_siwpmksa()
1476 if (!rdev->ops->del_pmksa) cfg80211_wext_siwpmksa()
1479 return rdev_del_pmksa(rdev, dev, &cfg_pmksa); cfg80211_wext_siwpmksa()
1482 if (!rdev->ops->flush_pmksa) cfg80211_wext_siwpmksa()
1485 return rdev_flush_pmksa(rdev, dev); cfg80211_wext_siwpmksa()
H A D sme.c 21 #include "rdev-ops.h"
62 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_conn_scan() local
69 if (rdev->scan_req || rdev->scan_msg) cfg80211_conn_scan()
123 request->wiphy = &rdev->wiphy; cfg80211_conn_scan()
126 rdev->scan_req = request; cfg80211_conn_scan()
128 err = rdev_scan(rdev, request); cfg80211_conn_scan()
131 nl80211_send_scan_start(rdev, wdev); cfg80211_conn_scan()
134 rdev->scan_req = NULL; cfg80211_conn_scan()
142 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_conn_do_work() local
161 if (WARN_ON(!rdev->ops->auth)) cfg80211_conn_do_work()
164 return cfg80211_mlme_auth(rdev, wdev->netdev, cfg80211_conn_do_work()
174 if (WARN_ON(!rdev->ops->assoc)) cfg80211_conn_do_work()
189 err = cfg80211_mlme_assoc(rdev, wdev->netdev, params->channel, cfg80211_conn_do_work()
193 cfg80211_mlme_deauth(rdev, wdev->netdev, params->bssid, cfg80211_conn_do_work()
199 cfg80211_mlme_deauth(rdev, wdev->netdev, params->bssid, cfg80211_conn_do_work()
204 cfg80211_mlme_deauth(rdev, wdev->netdev, params->bssid, cfg80211_conn_do_work()
217 struct cfg80211_registered_device *rdev = cfg80211_conn_work() local
224 list_for_each_entry(wdev, &rdev->wdev_list, list) { cfg80211_conn_work()
258 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_get_conn_bss() local
276 schedule_work(&rdev->conn_work); cfg80211_get_conn_bss()
284 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); __cfg80211_sme_scan_done() local
298 cfg80211_put_bss(&rdev->wiphy, bss); __cfg80211_sme_scan_done()
300 schedule_work(&rdev->conn_work); __cfg80211_sme_scan_done()
315 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); cfg80211_sme_rx_auth() local
348 schedule_work(&rdev->conn_work); cfg80211_sme_rx_auth()
355 schedule_work(&rdev->conn_work); cfg80211_sme_rx_auth()
361 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_sme_rx_assoc_resp() local
379 schedule_work(&rdev->conn_work); cfg80211_sme_rx_assoc_resp()
384 schedule_work(&rdev->conn_work); cfg80211_sme_rx_assoc_resp()
395 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_sme_auth_timeout() local
401 schedule_work(&rdev->conn_work); cfg80211_sme_auth_timeout()
406 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_sme_disassoc() local
412 schedule_work(&rdev->conn_work); cfg80211_sme_disassoc()
417 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_sme_assoc_timeout() local
423 schedule_work(&rdev->conn_work); cfg80211_sme_assoc_timeout()
430 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_sme_get_conn_ies() local
434 if (!rdev->wiphy.extended_capabilities_len || cfg80211_sme_get_conn_ies()
443 buf = kmalloc(ies_len + rdev->wiphy.extended_capabilities_len + 2, cfg80211_sme_get_conn_ies()
463 memcpy(buf + offs + rdev->wiphy.extended_capabilities_len + 2, cfg80211_sme_get_conn_ies()
471 buf[offs + 1] = rdev->wiphy.extended_capabilities_len; cfg80211_sme_get_conn_ies()
473 rdev->wiphy.extended_capabilities, cfg80211_sme_get_conn_ies()
474 rdev->wiphy.extended_capabilities_len); cfg80211_sme_get_conn_ies()
477 *out_ies_len = ies_len + rdev->wiphy.extended_capabilities_len + 2; cfg80211_sme_get_conn_ies()
486 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_sme_connect() local
490 if (!rdev->ops->auth || !rdev->ops->assoc) cfg80211_sme_connect()
568 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_sme_disconnect() local
574 if (!rdev->ops->deauth) cfg80211_sme_disconnect()
584 err = cfg80211_mlme_deauth(rdev, wdev->netdev, cfg80211_sme_disconnect()
598 struct cfg80211_registered_device *rdev; cfg80211_is_all_idle() local
607 list_for_each_entry(rdev, &cfg80211_rdev_list, list) { cfg80211_is_all_idle()
608 list_for_each_entry(wdev, &rdev->wdev_list, list) { cfg80211_is_all_idle()
750 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_connect_result() local
776 queue_work(cfg80211_wq, &rdev->event_work); cfg80211_connect_result()
865 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_roamed_bss() local
890 queue_work(cfg80211_wq, &rdev->event_work); cfg80211_roamed_bss()
898 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); __cfg80211_disconnected() local
918 nl80211_send_disconnected(rdev, dev, reason, ie, ie_len, from_ap); __cfg80211_disconnected()
924 if (rdev->ops->del_key) __cfg80211_disconnected()
926 rdev_del_key(rdev, dev, i, false, NULL); __cfg80211_disconnected()
928 rdev_set_qos_map(rdev, dev, NULL); __cfg80211_disconnected()
945 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wdev->wiphy); cfg80211_disconnected() local
963 queue_work(cfg80211_wq, &rdev->event_work); cfg80211_disconnected()
970 int cfg80211_connect(struct cfg80211_registered_device *rdev, cfg80211_connect() argument
987 rdev->wiphy.ht_capa_mod_mask); cfg80211_connect()
1020 if (!rdev->ops->connect) cfg80211_connect()
1023 err = rdev_connect(rdev, dev, connect); cfg80211_connect()
1034 int cfg80211_disconnect(struct cfg80211_registered_device *rdev, cfg80211_disconnect() argument
1047 else if (!rdev->ops->disconnect) cfg80211_disconnect()
1048 cfg80211_mlme_down(rdev, dev); cfg80211_disconnect()
1050 err = rdev_disconnect(rdev, dev, reason); cfg80211_disconnect()
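The sme.c hits show the connection state machine never advancing inline: the event handlers (cfg80211_sme_rx_auth(), cfg80211_sme_disassoc(), the timeouts) only schedule rdev->conn_work, and the work item later walks rdev->wdev_list. A minimal sketch of that defer-to-workqueue idiom (the names and the queued function are illustrative):

#include <linux/printk.h>
#include <linux/workqueue.h>

static void my_conn_step(struct work_struct *work)
{
	pr_debug("advance one connection step outside atomic context\n");
}

static DECLARE_WORK(my_conn_work, my_conn_step);

static void my_rx_event(void)
{
	/* Handlers may run where sleeping is forbidden, so they only
	 * queue the heavy lifting, as sme.c does with
	 * schedule_work(&rdev->conn_work). */
	schedule_work(&my_conn_work);
}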
/linux-4.4.14/drivers/regulator/
H A D helpers.c 25 * @rdev: regulator to operate on
31 int regulator_is_enabled_regmap(struct regulator_dev *rdev) regulator_is_enabled_regmap() argument
36 ret = regmap_read(rdev->regmap, rdev->desc->enable_reg, &val); regulator_is_enabled_regmap()
40 val &= rdev->desc->enable_mask; regulator_is_enabled_regmap()
42 if (rdev->desc->enable_is_inverted) { regulator_is_enabled_regmap()
43 if (rdev->desc->enable_val) regulator_is_enabled_regmap()
44 return val != rdev->desc->enable_val; regulator_is_enabled_regmap()
47 if (rdev->desc->enable_val) regulator_is_enabled_regmap()
48 return val == rdev->desc->enable_val; regulator_is_enabled_regmap()
57 * @rdev: regulator to operate on
63 int regulator_enable_regmap(struct regulator_dev *rdev) regulator_enable_regmap() argument
67 if (rdev->desc->enable_is_inverted) { regulator_enable_regmap()
68 val = rdev->desc->disable_val; regulator_enable_regmap()
70 val = rdev->desc->enable_val; regulator_enable_regmap()
72 val = rdev->desc->enable_mask; regulator_enable_regmap()
75 return regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, regulator_enable_regmap()
76 rdev->desc->enable_mask, val); regulator_enable_regmap()
83 * @rdev: regulator to operate on
89 int regulator_disable_regmap(struct regulator_dev *rdev) regulator_disable_regmap() argument
93 if (rdev->desc->enable_is_inverted) { regulator_disable_regmap()
94 val = rdev->desc->enable_val; regulator_disable_regmap()
96 val = rdev->desc->enable_mask; regulator_disable_regmap()
98 val = rdev->desc->disable_val; regulator_disable_regmap()
101 return regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, regulator_disable_regmap()
102 rdev->desc->enable_mask, val); regulator_disable_regmap()
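regulator_enable_regmap() and regulator_disable_regmap() mirror each other around enable_is_inverted: an inverted part is enabled by writing its disable value and vice versa, and whichever branch writes enable_val falls back to the full enable_mask when that field is zero. A compact restatement of the enable-side branch (an illustrative helper, not kernel API):

static unsigned int enable_write_val(bool inverted, unsigned int mask,
				     unsigned int enable_val,
				     unsigned int disable_val)
{
	/* Matches the branches above: inverted parts write disable_val;
	 * otherwise enable_val, falling back to the whole mask. */
	if (inverted)
		return disable_val;
	return enable_val ? enable_val : mask;
}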
109 * @rdev: regulator to operate on
115 int regulator_get_voltage_sel_regmap(struct regulator_dev *rdev) regulator_get_voltage_sel_regmap() argument
120 ret = regmap_read(rdev->regmap, rdev->desc->vsel_reg, &val); regulator_get_voltage_sel_regmap()
124 val &= rdev->desc->vsel_mask; regulator_get_voltage_sel_regmap()
125 val >>= ffs(rdev->desc->vsel_mask) - 1; regulator_get_voltage_sel_regmap()
134 * @rdev: regulator to operate on
141 int regulator_set_voltage_sel_regmap(struct regulator_dev *rdev, unsigned sel) regulator_set_voltage_sel_regmap() argument
145 sel <<= ffs(rdev->desc->vsel_mask) - 1; regulator_set_voltage_sel_regmap()
147 ret = regmap_update_bits(rdev->regmap, rdev->desc->vsel_reg, regulator_set_voltage_sel_regmap()
148 rdev->desc->vsel_mask, sel); regulator_set_voltage_sel_regmap()
152 if (rdev->desc->apply_bit) regulator_set_voltage_sel_regmap()
153 ret = regmap_update_bits(rdev->regmap, rdev->desc->apply_reg, regulator_set_voltage_sel_regmap()
154 rdev->desc->apply_bit, regulator_set_voltage_sel_regmap()
155 rdev->desc->apply_bit); regulator_set_voltage_sel_regmap()
163 * @rdev: Regulator to operate on
172 int regulator_map_voltage_iterate(struct regulator_dev *rdev, regulator_map_voltage_iterate() argument
182 for (i = 0; i < rdev->desc->n_voltages; i++) { regulator_map_voltage_iterate()
183 ret = rdev->desc->ops->list_voltage(rdev, i); regulator_map_voltage_iterate()
203 * @rdev: Regulator to operate on
210 int regulator_map_voltage_ascend(struct regulator_dev *rdev, regulator_map_voltage_ascend() argument
215 for (i = 0; i < rdev->desc->n_voltages; i++) { regulator_map_voltage_ascend()
216 ret = rdev->desc->ops->list_voltage(rdev, i); regulator_map_voltage_ascend()
234 * @rdev: Regulator to operate on
241 int regulator_map_voltage_linear(struct regulator_dev *rdev, regulator_map_voltage_linear() argument
247 if (rdev->desc->n_voltages == 1 && rdev->desc->uV_step == 0) { regulator_map_voltage_linear()
248 if (min_uV <= rdev->desc->min_uV && rdev->desc->min_uV <= max_uV) regulator_map_voltage_linear()
254 if (!rdev->desc->uV_step) { regulator_map_voltage_linear()
255 BUG_ON(!rdev->desc->uV_step); regulator_map_voltage_linear()
259 if (min_uV < rdev->desc->min_uV) regulator_map_voltage_linear()
260 min_uV = rdev->desc->min_uV; regulator_map_voltage_linear()
262 ret = DIV_ROUND_UP(min_uV - rdev->desc->min_uV, rdev->desc->uV_step); regulator_map_voltage_linear()
266 ret += rdev->desc->linear_min_sel; regulator_map_voltage_linear()
269 voltage = rdev->desc->ops->list_voltage(rdev, ret); regulator_map_voltage_linear()
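regulator_map_voltage_linear() is straight arithmetic: selector = DIV_ROUND_UP(requested_min_uV - desc->min_uV, desc->uV_step) + linear_min_sel, after which the listed voltage is re-checked against the ceiling. A worked example with made-up constants (0.8 V base, 25 mV step): asking for at least 860000 uV gives DIV_ROUND_UP(60000, 25000) = 3, which lists as 875000 uV.

#define DIV_ROUND_UP(n, d)  (((n) + (d) - 1) / (d))

static int map_linear(int req_uV, int base_uV, int step_uV)
{
	if (req_uV < base_uV)
		req_uV = base_uV;
	return DIV_ROUND_UP(req_uV - base_uV, step_uV);
}
/* map_linear(860000, 800000, 25000) == 3; 800000 + 3 * 25000 = 875000 */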
280 * @rdev: Regulator to operate on
287 int regulator_map_voltage_linear_range(struct regulator_dev *rdev, regulator_map_voltage_linear_range() argument
294 if (!rdev->desc->n_linear_ranges) { regulator_map_voltage_linear_range()
295 BUG_ON(!rdev->desc->n_linear_ranges); regulator_map_voltage_linear_range()
299 for (i = 0; i < rdev->desc->n_linear_ranges; i++) { regulator_map_voltage_linear_range()
302 range = &rdev->desc->linear_ranges[i]; regulator_map_voltage_linear_range()
327 if (i == rdev->desc->n_linear_ranges) regulator_map_voltage_linear_range()
331 voltage = rdev->desc->ops->list_voltage(rdev, ret); regulator_map_voltage_linear_range()
342 * @rdev: Regulator device
349 int regulator_list_voltage_linear(struct regulator_dev *rdev, regulator_list_voltage_linear() argument
352 if (selector >= rdev->desc->n_voltages) regulator_list_voltage_linear()
354 if (selector < rdev->desc->linear_min_sel) regulator_list_voltage_linear()
357 selector -= rdev->desc->linear_min_sel; regulator_list_voltage_linear()
359 return rdev->desc->min_uV + (rdev->desc->uV_step * selector); regulator_list_voltage_linear()
366 * @rdev: Regulator device
373 int regulator_list_voltage_linear_range(struct regulator_dev *rdev, regulator_list_voltage_linear_range() argument
379 if (!rdev->desc->n_linear_ranges) { regulator_list_voltage_linear_range()
380 BUG_ON(!rdev->desc->n_linear_ranges); regulator_list_voltage_linear_range()
384 for (i = 0; i < rdev->desc->n_linear_ranges; i++) { regulator_list_voltage_linear_range()
385 range = &rdev->desc->linear_ranges[i]; regulator_list_voltage_linear_range()
403 * @rdev: Regulator device
410 int regulator_list_voltage_table(struct regulator_dev *rdev, regulator_list_voltage_table() argument
413 if (!rdev->desc->volt_table) { regulator_list_voltage_table()
414 BUG_ON(!rdev->desc->volt_table); regulator_list_voltage_table()
418 if (selector >= rdev->desc->n_voltages) regulator_list_voltage_table()
421 return rdev->desc->volt_table[selector]; regulator_list_voltage_table()
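--
regulator_list_voltage_table() just indexes desc->volt_table, so a driver with discrete, non-linear levels publishes them directly. A sketch with made-up values (ascending order, so regulator_map_voltage_ascend() above would also serve as .map_voltage):

	#include <linux/regulator/driver.h>

	/* Hypothetical table; real values come from a chip's datasheet. */
	static const unsigned int my_buck_volt_table[] = {
		1100000, 1200000, 1350000, 1500000,
	};

	static const struct regulator_desc my_buck_desc = {
		.name		= "my-buck",
		.volt_table	= my_buck_volt_table,
		.n_voltages	= ARRAY_SIZE(my_buck_volt_table),
		/* .ops would point at regulator_list_voltage_table etc. */
	};
--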
428 * @rdev: device to operate on.
431 int regulator_set_bypass_regmap(struct regulator_dev *rdev, bool enable) regulator_set_bypass_regmap() argument
436 val = rdev->desc->bypass_val_on; regulator_set_bypass_regmap()
438 val = rdev->desc->bypass_mask; regulator_set_bypass_regmap()
440 val = rdev->desc->bypass_val_off; regulator_set_bypass_regmap()
443 return regmap_update_bits(rdev->regmap, rdev->desc->bypass_reg, regulator_set_bypass_regmap()
444 rdev->desc->bypass_mask, val); regulator_set_bypass_regmap()
451 * @rdev: device to operate on.
454 int regulator_get_bypass_regmap(struct regulator_dev *rdev, bool *enable) regulator_get_bypass_regmap() argument
459 ret = regmap_read(rdev->regmap, rdev->desc->bypass_reg, &val); regulator_get_bypass_regmap()
463 *enable = val & rdev->desc->bypass_mask; regulator_get_bypass_regmap()
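--
The bypass pair follows the same regmap-plus-descriptor pattern. A hedged fragment, assuming bypass is bit 7 of register 0x21 (register and bit invented for illustration):

	#include <linux/regulator/driver.h>

	static const struct regulator_ops my_bypass_ops = {
		.set_bypass = regulator_set_bypass_regmap,
		.get_bypass = regulator_get_bypass_regmap,
	};

	static const struct regulator_desc my_bypass_desc = {
		.name		= "my-ldo-bypass",
		.ops		= &my_bypass_ops,
		.bypass_reg	= 0x21,		/* assumed register */
		.bypass_mask	= BIT(7),
		.bypass_val_on	= BIT(7),	/* if left zero, the helper falls back to bypass_mask */
		.bypass_val_off	= 0,
	};
--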
H A Dcore.c
42 #define rdev_crit(rdev, fmt, ...) \
43 pr_crit("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
44 #define rdev_err(rdev, fmt, ...) \
45 pr_err("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
46 #define rdev_warn(rdev, fmt, ...) \
47 pr_warn("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
48 #define rdev_info(rdev, fmt, ...) \
49 pr_info("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
50 #define rdev_dbg(rdev, fmt, ...) \
51 pr_debug("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
101 static int _regulator_is_enabled(struct regulator_dev *rdev);
102 static int _regulator_disable(struct regulator_dev *rdev);
103 static int _regulator_get_voltage(struct regulator_dev *rdev);
104 static int _regulator_get_current_limit(struct regulator_dev *rdev);
105 static unsigned int _regulator_get_mode(struct regulator_dev *rdev);
106 static int _notifier_call_chain(struct regulator_dev *rdev,
108 static int _regulator_do_set_voltage(struct regulator_dev *rdev,
110 static struct regulator *create_regulator(struct regulator_dev *rdev,
120 static const char *rdev_get_name(struct regulator_dev *rdev) rdev_get_name() argument
122 if (rdev->constraints && rdev->constraints->name) rdev_get_name()
123 return rdev->constraints->name; rdev_get_name()
124 else if (rdev->desc->name) rdev_get_name()
125 return rdev->desc->name; rdev_get_name()
135 static inline struct regulator_dev *rdev_get_supply(struct regulator_dev *rdev) rdev_get_supply() argument
137 if (rdev && rdev->supply) rdev_get_supply()
138 return rdev->supply->rdev; rdev_get_supply()
145 * @rdev: regulator source
147 static void regulator_lock_supply(struct regulator_dev *rdev) regulator_lock_supply() argument
151 for (i = 0; rdev; rdev = rdev_get_supply(rdev), i++) regulator_lock_supply()
152 mutex_lock_nested(&rdev->mutex, i); regulator_lock_supply()
157 * @rdev: regulator source
159 static void regulator_unlock_supply(struct regulator_dev *rdev) regulator_unlock_supply() argument
164 mutex_unlock(&rdev->mutex); regulator_unlock_supply()
165 supply = rdev->supply; regulator_unlock_supply()
167 if (!rdev->supply) regulator_unlock_supply()
170 rdev = supply->rdev; regulator_unlock_supply()
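--
regulator_lock_supply() walks up the supply chain taking each rdev mutex with an increasing lockdep subclass: every regulator mutex shares one lock class, so without mutex_lock_nested() lockdep would flag the second acquisition as a recursive deadlock. A toy two-level illustration (hypothetical child/parent pointers, sketch only):

	/* 'child' is supplied by 'parent'; the loop index i above
	 * becomes the lockdep subclass, making the nesting legal.
	 */
	mutex_lock_nested(&child->mutex, 0);	/* i == 0 */
	mutex_lock_nested(&parent->mutex, 1);	/* i == 1 */
	/* ... operate on the whole chain ... */
	mutex_unlock(&parent->mutex);
	mutex_unlock(&child->mutex);
--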
201 static int _regulator_can_change_status(struct regulator_dev *rdev) _regulator_can_change_status() argument
203 if (!rdev->constraints) _regulator_can_change_status()
206 if (rdev->constraints->valid_ops_mask & REGULATOR_CHANGE_STATUS) _regulator_can_change_status()
213 static int regulator_check_voltage(struct regulator_dev *rdev, regulator_check_voltage() argument
218 if (!rdev->constraints) { regulator_check_voltage()
219 rdev_err(rdev, "no constraints\n"); regulator_check_voltage()
222 if (!(rdev->constraints->valid_ops_mask & REGULATOR_CHANGE_VOLTAGE)) { regulator_check_voltage()
223 rdev_err(rdev, "voltage operation not allowed\n"); regulator_check_voltage()
227 if (*max_uV > rdev->constraints->max_uV) regulator_check_voltage()
228 *max_uV = rdev->constraints->max_uV; regulator_check_voltage()
229 if (*min_uV < rdev->constraints->min_uV) regulator_check_voltage()
230 *min_uV = rdev->constraints->min_uV; regulator_check_voltage()
233 rdev_err(rdev, "unsupportable voltage range: %d-%duV\n", regulator_check_voltage()
244 static int regulator_check_consumers(struct regulator_dev *rdev, regulator_check_consumers() argument
249 list_for_each_entry(regulator, &rdev->consumer_list, list) { regulator_check_consumers()
264 rdev_err(rdev, "Restricting voltage, %u-%uuV\n", regulator_check_consumers()
273 static int regulator_check_current_limit(struct regulator_dev *rdev, regulator_check_current_limit() argument
278 if (!rdev->constraints) { regulator_check_current_limit()
279 rdev_err(rdev, "no constraints\n"); regulator_check_current_limit()
282 if (!(rdev->constraints->valid_ops_mask & REGULATOR_CHANGE_CURRENT)) { regulator_check_current_limit()
283 rdev_err(rdev, "current operation not allowed\n"); regulator_check_current_limit()
287 if (*max_uA > rdev->constraints->max_uA) regulator_check_current_limit()
288 *max_uA = rdev->constraints->max_uA; regulator_check_current_limit()
289 if (*min_uA < rdev->constraints->min_uA) regulator_check_current_limit()
290 *min_uA = rdev->constraints->min_uA; regulator_check_current_limit()
293 rdev_err(rdev, "unsupportable current range: %d-%duA\n", regulator_check_current_limit()
302 static int regulator_mode_constrain(struct regulator_dev *rdev, int *mode) regulator_mode_constrain() argument
311 rdev_err(rdev, "invalid mode %x specified\n", *mode); regulator_mode_constrain()
315 if (!rdev->constraints) { regulator_mode_constrain()
316 rdev_err(rdev, "no constraints\n"); regulator_mode_constrain()
319 if (!(rdev->constraints->valid_ops_mask & REGULATOR_CHANGE_MODE)) { regulator_mode_constrain()
320 rdev_err(rdev, "mode operation not allowed\n"); regulator_mode_constrain()
328 if (rdev->constraints->valid_modes_mask & *mode) regulator_mode_constrain()
337 static int regulator_check_drms(struct regulator_dev *rdev) regulator_check_drms() argument
339 if (!rdev->constraints) { regulator_check_drms()
340 rdev_err(rdev, "no constraints\n"); regulator_check_drms()
343 if (!(rdev->constraints->valid_ops_mask & REGULATOR_CHANGE_DRMS)) { regulator_check_drms()
344 rdev_dbg(rdev, "drms operation not allowed\n"); regulator_check_drms()
353 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_uV_show() local
356 mutex_lock(&rdev->mutex); regulator_uV_show()
357 ret = sprintf(buf, "%d\n", _regulator_get_voltage(rdev)); regulator_uV_show()
358 mutex_unlock(&rdev->mutex); regulator_uV_show()
367 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_uA_show() local
369 return sprintf(buf, "%d\n", _regulator_get_current_limit(rdev)); regulator_uA_show()
376 struct regulator_dev *rdev = dev_get_drvdata(dev); name_show() local
378 return sprintf(buf, "%s\n", rdev_get_name(rdev)); name_show()
400 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_opmode_show() local
402 return regulator_print_opmode(buf, _regulator_get_mode(rdev)); regulator_opmode_show()
419 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_state_show() local
422 mutex_lock(&rdev->mutex); regulator_state_show()
423 ret = regulator_print_state(buf, _regulator_is_enabled(rdev)); regulator_state_show()
424 mutex_unlock(&rdev->mutex); regulator_state_show()
433 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_status_show() local
437 status = rdev->desc->ops->get_status(rdev); regulator_status_show()
480 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_min_uA_show() local
482 if (!rdev->constraints) regulator_min_uA_show()
485 return sprintf(buf, "%d\n", rdev->constraints->min_uA); regulator_min_uA_show()
492 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_max_uA_show() local
494 if (!rdev->constraints) regulator_max_uA_show()
497 return sprintf(buf, "%d\n", rdev->constraints->max_uA); regulator_max_uA_show()
504 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_min_uV_show() local
506 if (!rdev->constraints) regulator_min_uV_show()
509 return sprintf(buf, "%d\n", rdev->constraints->min_uV); regulator_min_uV_show()
516 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_max_uV_show() local
518 if (!rdev->constraints) regulator_max_uV_show()
521 return sprintf(buf, "%d\n", rdev->constraints->max_uV); regulator_max_uV_show()
528 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_total_uA_show() local
532 mutex_lock(&rdev->mutex); regulator_total_uA_show()
533 list_for_each_entry(regulator, &rdev->consumer_list, list) regulator_total_uA_show()
535 mutex_unlock(&rdev->mutex); regulator_total_uA_show()
543 struct regulator_dev *rdev = dev_get_drvdata(dev); num_users_show() local
544 return sprintf(buf, "%d\n", rdev->use_count); num_users_show()
551 struct regulator_dev *rdev = dev_get_drvdata(dev); type_show() local
553 switch (rdev->desc->type) { type_show()
566 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_suspend_mem_uV_show() local
568 return sprintf(buf, "%d\n", rdev->constraints->state_mem.uV); regulator_suspend_mem_uV_show()
576 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_suspend_disk_uV_show() local
578 return sprintf(buf, "%d\n", rdev->constraints->state_disk.uV); regulator_suspend_disk_uV_show()
586 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_suspend_standby_uV_show() local
588 return sprintf(buf, "%d\n", rdev->constraints->state_standby.uV); regulator_suspend_standby_uV_show()
596 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_suspend_mem_mode_show() local
599 rdev->constraints->state_mem.mode); regulator_suspend_mem_mode_show()
607 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_suspend_disk_mode_show() local
610 rdev->constraints->state_disk.mode); regulator_suspend_disk_mode_show()
618 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_suspend_standby_mode_show() local
621 rdev->constraints->state_standby.mode); regulator_suspend_standby_mode_show()
629 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_suspend_mem_state_show() local
632 rdev->constraints->state_mem.enabled); regulator_suspend_mem_state_show()
640 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_suspend_disk_state_show() local
643 rdev->constraints->state_disk.enabled); regulator_suspend_disk_state_show()
651 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_suspend_standby_state_show() local
654 rdev->constraints->state_standby.enabled); regulator_suspend_standby_state_show()
662 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_bypass_show() local
667 ret = rdev->desc->ops->get_bypass(rdev, &bypass); regulator_bypass_show()
683 static int drms_uA_update(struct regulator_dev *rdev) drms_uA_update() argument
689 lockdep_assert_held_once(&rdev->mutex); drms_uA_update()
695 err = regulator_check_drms(rdev); drms_uA_update()
699 if (!rdev->desc->ops->get_optimum_mode && drms_uA_update()
700 !rdev->desc->ops->set_load) drms_uA_update()
703 if (!rdev->desc->ops->set_mode && drms_uA_update()
704 !rdev->desc->ops->set_load) drms_uA_update()
708 output_uV = _regulator_get_voltage(rdev); drms_uA_update()
710 rdev_err(rdev, "invalid output voltage found\n"); drms_uA_update()
716 if (rdev->supply) drms_uA_update()
717 input_uV = regulator_get_voltage(rdev->supply); drms_uA_update()
719 input_uV = rdev->constraints->input_uV; drms_uA_update()
721 rdev_err(rdev, "invalid input voltage found\n"); drms_uA_update()
726 list_for_each_entry(sibling, &rdev->consumer_list, list) drms_uA_update()
729 current_uA += rdev->constraints->system_load; drms_uA_update()
731 if (rdev->desc->ops->set_load) { drms_uA_update()
733 err = rdev->desc->ops->set_load(rdev, current_uA); drms_uA_update()
735 rdev_err(rdev, "failed to set load %d\n", current_uA); drms_uA_update()
738 mode = rdev->desc->ops->get_optimum_mode(rdev, input_uV, drms_uA_update()
742 err = regulator_mode_constrain(rdev, &mode); drms_uA_update()
744 rdev_err(rdev, "failed to get optimum mode @ %d uA %d -> %d uV\n", drms_uA_update()
749 err = rdev->desc->ops->set_mode(rdev, mode); drms_uA_update()
751 rdev_err(rdev, "failed to set optimum mode %x\n", mode); drms_uA_update()
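--
From the consumer side this whole path is driven by regulator_set_load(). A hedged usage sketch (hypothetical consumer handle and load figures, error handling trimmed):

	#include <linux/regulator/consumer.h>

	static int my_do_burst(struct regulator *my_reg)	/* illustrative */
	{
		int ret;

		ret = regulator_set_load(my_reg, 120000);	/* ~120 mA before a burst */
		if (ret)
			return ret;
		/* ... perform the transfer ... */
		return regulator_set_load(my_reg, 1000);	/* back to idle so
								 * drms_uA_update() can pick
								 * a lower-power mode */
	}
--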
757 static int suspend_set_state(struct regulator_dev *rdev, suspend_set_state() argument
767 if (rdev->desc->ops->set_suspend_voltage || suspend_set_state()
768 rdev->desc->ops->set_suspend_mode) suspend_set_state()
769 rdev_warn(rdev, "No configuration\n"); suspend_set_state()
774 rdev_err(rdev, "invalid configuration\n"); suspend_set_state()
778 if (rstate->enabled && rdev->desc->ops->set_suspend_enable) suspend_set_state()
779 ret = rdev->desc->ops->set_suspend_enable(rdev); suspend_set_state()
780 else if (rstate->disabled && rdev->desc->ops->set_suspend_disable) suspend_set_state()
781 ret = rdev->desc->ops->set_suspend_disable(rdev); suspend_set_state()
786 rdev_err(rdev, "failed to enabled/disable\n"); suspend_set_state()
790 if (rdev->desc->ops->set_suspend_voltage && rstate->uV > 0) { suspend_set_state()
791 ret = rdev->desc->ops->set_suspend_voltage(rdev, rstate->uV); suspend_set_state()
793 rdev_err(rdev, "failed to set voltage\n"); suspend_set_state()
798 if (rdev->desc->ops->set_suspend_mode && rstate->mode > 0) { suspend_set_state()
799 ret = rdev->desc->ops->set_suspend_mode(rdev, rstate->mode); suspend_set_state()
801 rdev_err(rdev, "failed to set mode\n"); suspend_set_state()
809 static int suspend_prepare(struct regulator_dev *rdev, suspend_state_t state) suspend_prepare() argument
811 lockdep_assert_held_once(&rdev->mutex); suspend_prepare()
813 if (!rdev->constraints) suspend_prepare()
818 return suspend_set_state(rdev, suspend_prepare()
819 &rdev->constraints->state_standby); suspend_prepare()
821 return suspend_set_state(rdev, suspend_prepare()
822 &rdev->constraints->state_mem); suspend_prepare()
824 return suspend_set_state(rdev, suspend_prepare()
825 &rdev->constraints->state_disk); suspend_prepare()
831 static void print_constraints(struct regulator_dev *rdev) print_constraints() argument
833 struct regulation_constraints *constraints = rdev->constraints; print_constraints()
852 ret = _regulator_get_voltage(rdev); print_constraints()
875 ret = _regulator_get_current_limit(rdev); print_constraints()
893 rdev_dbg(rdev, "%s\n", buf); print_constraints()
897 rdev_warn(rdev, print_constraints()
901 static int machine_constraints_voltage(struct regulator_dev *rdev, machine_constraints_voltage() argument
904 const struct regulator_ops *ops = rdev->desc->ops; machine_constraints_voltage()
908 if (rdev->constraints->apply_uV && machine_constraints_voltage()
909 rdev->constraints->min_uV == rdev->constraints->max_uV) { machine_constraints_voltage()
910 int current_uV = _regulator_get_voltage(rdev); machine_constraints_voltage()
912 rdev_err(rdev, machine_constraints_voltage()
917 if (current_uV < rdev->constraints->min_uV || machine_constraints_voltage()
918 current_uV > rdev->constraints->max_uV) { machine_constraints_voltage()
920 rdev, rdev->constraints->min_uV, machine_constraints_voltage()
921 rdev->constraints->max_uV); machine_constraints_voltage()
923 rdev_err(rdev, machine_constraints_voltage()
925 rdev->constraints->min_uV, ret); machine_constraints_voltage()
934 if (ops->list_voltage && rdev->desc->n_voltages) { machine_constraints_voltage()
935 int count = rdev->desc->n_voltages; machine_constraints_voltage()
957 rdev_err(rdev, "invalid voltage constraints\n"); machine_constraints_voltage()
965 value = ops->list_voltage(rdev, i); machine_constraints_voltage()
978 rdev_err(rdev, machine_constraints_voltage()
986 rdev_dbg(rdev, "override min_uV, %d -> %d\n", machine_constraints_voltage()
991 rdev_dbg(rdev, "override max_uV, %d -> %d\n", machine_constraints_voltage()
1000 static int machine_constraints_current(struct regulator_dev *rdev, machine_constraints_current() argument
1003 const struct regulator_ops *ops = rdev->desc->ops; machine_constraints_current()
1010 rdev_err(rdev, "Invalid current constraints\n"); machine_constraints_current()
1015 rdev_warn(rdev, "Operation of current configuration missing\n"); machine_constraints_current()
1020 ret = ops->set_current_limit(rdev, constraints->min_uA, machine_constraints_current()
1023 rdev_err(rdev, "Failed to set current constraint, %d\n", ret); machine_constraints_current()
1030 static int _regulator_do_enable(struct regulator_dev *rdev);
1034 * @rdev: regulator source
1043 static int set_machine_constraints(struct regulator_dev *rdev, set_machine_constraints() argument
1047 const struct regulator_ops *ops = rdev->desc->ops; set_machine_constraints()
1050 rdev->constraints = kmemdup(constraints, sizeof(*constraints), set_machine_constraints()
1053 rdev->constraints = kzalloc(sizeof(*constraints), set_machine_constraints()
1055 if (!rdev->constraints) set_machine_constraints()
1058 ret = machine_constraints_voltage(rdev, rdev->constraints); set_machine_constraints()
1062 ret = machine_constraints_current(rdev, rdev->constraints); set_machine_constraints()
1066 if (rdev->constraints->ilim_uA && ops->set_input_current_limit) { set_machine_constraints()
1067 ret = ops->set_input_current_limit(rdev, set_machine_constraints()
1068 rdev->constraints->ilim_uA); set_machine_constraints()
1070 rdev_err(rdev, "failed to set input limit\n"); set_machine_constraints()
1076 if (rdev->constraints->initial_state) { set_machine_constraints()
1077 ret = suspend_prepare(rdev, rdev->constraints->initial_state); set_machine_constraints()
1079 rdev_err(rdev, "failed to set suspend state\n"); set_machine_constraints()
1084 if (rdev->constraints->initial_mode) { set_machine_constraints()
1086 rdev_err(rdev, "no set_mode operation\n"); set_machine_constraints()
1091 ret = ops->set_mode(rdev, rdev->constraints->initial_mode); set_machine_constraints()
1093 rdev_err(rdev, "failed to set initial mode: %d\n", ret); set_machine_constraints()
1101 if (rdev->constraints->always_on || rdev->constraints->boot_on) { set_machine_constraints()
1102 ret = _regulator_do_enable(rdev); set_machine_constraints()
1104 rdev_err(rdev, "failed to enable\n"); set_machine_constraints()
1109 if ((rdev->constraints->ramp_delay || rdev->constraints->ramp_disable) set_machine_constraints()
1111 ret = ops->set_ramp_delay(rdev, rdev->constraints->ramp_delay); set_machine_constraints()
1113 rdev_err(rdev, "failed to set ramp_delay\n"); set_machine_constraints()
1118 if (rdev->constraints->pull_down && ops->set_pull_down) { set_machine_constraints()
1119 ret = ops->set_pull_down(rdev); set_machine_constraints()
1121 rdev_err(rdev, "failed to set pull down\n"); set_machine_constraints()
1126 if (rdev->constraints->soft_start && ops->set_soft_start) { set_machine_constraints()
1127 ret = ops->set_soft_start(rdev); set_machine_constraints()
1129 rdev_err(rdev, "failed to set soft start\n"); set_machine_constraints()
1134 if (rdev->constraints->over_current_protection set_machine_constraints()
1136 ret = ops->set_over_current_protection(rdev); set_machine_constraints()
1138 rdev_err(rdev, "failed to set over current protection\n"); set_machine_constraints()
1143 print_constraints(rdev); set_machine_constraints()
1146 kfree(rdev->constraints); set_machine_constraints()
1147 rdev->constraints = NULL; set_machine_constraints()
1153 * @rdev: regulator to be supplied set_supply()
1160 static int set_supply(struct regulator_dev *rdev, set_supply() argument
1165 rdev_info(rdev, "supplied by %s\n", rdev_get_name(supply_rdev)); set_supply()
1170 rdev->supply = create_regulator(supply_rdev, &rdev->dev, "SUPPLY"); set_supply()
1171 if (rdev->supply == NULL) { set_supply()
1182 * @rdev: regulator source
1191 static int set_consumer_device_supply(struct regulator_dev *rdev, set_consumer_device_supply() argument
1222 dev_name(&rdev->dev), rdev_get_name(rdev)); set_consumer_device_supply()
1230 node->regulator = rdev; set_consumer_device_supply()
1245 static void unset_regulator_supplies(struct regulator_dev *rdev) unset_regulator_supplies() argument
1250 if (rdev == node->regulator) { unset_regulator_supplies()
1260 static struct regulator *create_regulator(struct regulator_dev *rdev, create_regulator() argument
1272 mutex_lock(&rdev->mutex); create_regulator()
1273 regulator->rdev = rdev; create_regulator()
1274 list_add(&regulator->list, &rdev->consumer_list); create_regulator()
1289 err = sysfs_create_link_nowarn(&rdev->dev.kobj, &dev->kobj, create_regulator()
1292 rdev_dbg(rdev, "could not add device link %s err %d\n", create_regulator()
1303 rdev->debugfs); create_regulator()
1305 rdev_dbg(rdev, "Failed to create debugfs directory\n"); create_regulator()
1320 if (!_regulator_can_change_status(rdev) && create_regulator()
1321 _regulator_is_enabled(rdev)) create_regulator()
1324 mutex_unlock(&rdev->mutex); create_regulator()
1329 mutex_unlock(&rdev->mutex); create_regulator()
1333 static int _regulator_get_enable_time(struct regulator_dev *rdev) _regulator_get_enable_time() argument
1335 if (rdev->constraints && rdev->constraints->enable_time) _regulator_get_enable_time()
1336 return rdev->constraints->enable_time; _regulator_get_enable_time()
1337 if (!rdev->desc->ops->enable_time) _regulator_get_enable_time()
1338 return rdev->desc->enable_time; _regulator_get_enable_time()
1339 return rdev->desc->ops->enable_time(rdev); _regulator_get_enable_time()
1466 static int regulator_resolve_supply(struct regulator_dev *rdev) regulator_resolve_supply() argument
1469 struct device *dev = rdev->dev.parent; regulator_resolve_supply()
1473 if (!rdev->supply_name) regulator_resolve_supply()
1477 if (rdev->supply) regulator_resolve_supply()
1480 r = regulator_dev_lookup(dev, rdev->supply_name, &ret); regulator_resolve_supply()
1499 rdev->supply_name, rdev->desc->name); regulator_resolve_supply()
1511 ret = set_supply(rdev, r); regulator_resolve_supply()
1518 if (_regulator_is_enabled(rdev) && rdev->supply) { regulator_resolve_supply()
1519 ret = regulator_enable(rdev->supply); regulator_resolve_supply()
1521 _regulator_put(rdev->supply); regulator_resolve_supply()
1533 struct regulator_dev *rdev; _regulator_get() local
1551 rdev = regulator_dev_lookup(dev, id, &ret); _regulator_get()
1552 if (rdev) _regulator_get()
1575 rdev = dummy_regulator_rdev; _regulator_get()
1576 get_device(&rdev->dev); _regulator_get()
1586 if (rdev->exclusive) { _regulator_get()
1588 put_device(&rdev->dev); _regulator_get()
1592 if (exclusive && rdev->open_count) { _regulator_get()
1594 put_device(&rdev->dev); _regulator_get()
1598 ret = regulator_resolve_supply(rdev); _regulator_get()
1601 put_device(&rdev->dev); _regulator_get()
1605 if (!try_module_get(rdev->owner)) { _regulator_get()
1606 put_device(&rdev->dev); _regulator_get()
1610 regulator = create_regulator(rdev, dev, id); _regulator_get()
1613 put_device(&rdev->dev); _regulator_get()
1614 module_put(rdev->owner); _regulator_get()
1618 rdev->open_count++; _regulator_get()
1620 rdev->exclusive = 1; _regulator_get()
1622 ret = _regulator_is_enabled(rdev); _regulator_get()
1624 rdev->use_count = 1; _regulator_get()
1626 rdev->use_count = 0; _regulator_get()
1707 struct regulator_dev *rdev; _regulator_put() local
1714 rdev = regulator->rdev; _regulator_put()
1720 sysfs_remove_link(&rdev->dev.kobj, regulator->supply_name); _regulator_put()
1721 mutex_lock(&rdev->mutex); _regulator_put()
1724 rdev->open_count--; _regulator_put()
1725 rdev->exclusive = 0; _regulator_put()
1726 put_device(&rdev->dev); _regulator_put()
1727 mutex_unlock(&rdev->mutex); _regulator_put()
1732 module_put(rdev->owner); _regulator_put()
1881 static int regulator_ena_gpio_request(struct regulator_dev *rdev, regulator_ena_gpio_request() argument
1892 rdev_dbg(rdev, "GPIO %d is already used\n", regulator_ena_gpio_request()
1900 rdev_get_name(rdev)); regulator_ena_gpio_request()
1916 rdev->ena_pin = pin; regulator_ena_gpio_request()
1920 static void regulator_ena_gpio_free(struct regulator_dev *rdev) regulator_ena_gpio_free() argument
1924 if (!rdev->ena_pin) regulator_ena_gpio_free()
1929 if (pin->gpiod == rdev->ena_pin->gpiod) { regulator_ena_gpio_free()
1935 rdev->ena_pin = NULL; regulator_ena_gpio_free()
1946 * @rdev: regulator_dev structure
1952 static int regulator_ena_gpio_ctrl(struct regulator_dev *rdev, bool enable) regulator_ena_gpio_ctrl() argument
1954 struct regulator_enable_gpio *pin = rdev->ena_pin; regulator_ena_gpio_ctrl()
2022 static int _regulator_do_enable(struct regulator_dev *rdev) _regulator_do_enable() argument
2027 ret = _regulator_get_enable_time(rdev); _regulator_do_enable()
2031 rdev_warn(rdev, "enable_time() failed: %d\n", ret); _regulator_do_enable()
2035 trace_regulator_enable(rdev_get_name(rdev)); _regulator_do_enable()
2037 if (rdev->desc->off_on_delay) { _regulator_do_enable()
2044 max_delay = usecs_to_jiffies(rdev->desc->off_on_delay); _regulator_do_enable()
2045 intended = rdev->last_off_jiffy + max_delay; _regulator_do_enable()
2062 if (rdev->ena_pin) { _regulator_do_enable()
2063 if (!rdev->ena_gpio_state) { _regulator_do_enable()
2064 ret = regulator_ena_gpio_ctrl(rdev, true); _regulator_do_enable()
2067 rdev->ena_gpio_state = 1; _regulator_do_enable()
2069 } else if (rdev->desc->ops->enable) { _regulator_do_enable()
2070 ret = rdev->desc->ops->enable(rdev); _regulator_do_enable()
2080 trace_regulator_enable_delay(rdev_get_name(rdev)); _regulator_do_enable()
2084 trace_regulator_enable_complete(rdev_get_name(rdev)); _regulator_do_enable()
2090 static int _regulator_enable(struct regulator_dev *rdev) _regulator_enable() argument
2094 lockdep_assert_held_once(&rdev->mutex); _regulator_enable()
2097 if (rdev->constraints && _regulator_enable()
2098 (rdev->constraints->valid_ops_mask & REGULATOR_CHANGE_DRMS)) _regulator_enable()
2099 drms_uA_update(rdev); _regulator_enable()
2101 if (rdev->use_count == 0) { _regulator_enable()
2103 ret = _regulator_is_enabled(rdev); _regulator_enable()
2105 if (!_regulator_can_change_status(rdev)) _regulator_enable()
2108 ret = _regulator_do_enable(rdev); _regulator_enable()
2113 rdev_err(rdev, "is_enabled() failed: %d\n", ret); _regulator_enable()
2119 rdev->use_count++; _regulator_enable()
2137 struct regulator_dev *rdev = regulator->rdev; regulator_enable() local
2143 if (rdev->supply) { regulator_enable()
2144 ret = regulator_enable(rdev->supply); regulator_enable()
2149 mutex_lock(&rdev->mutex); regulator_enable()
2150 ret = _regulator_enable(rdev); regulator_enable()
2151 mutex_unlock(&rdev->mutex); regulator_enable()
2153 if (ret != 0 && rdev->supply) regulator_enable()
2154 regulator_disable(rdev->supply); regulator_enable()
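--
The fragments above form the core of the usual consumer pattern. A minimal, hedged probe-path sketch ("vdd" and my_probe are assumed names):

	#include <linux/err.h>
	#include <linux/regulator/consumer.h>

	static int my_probe(struct device *dev)	/* illustrative */
	{
		struct regulator *vdd;
		int ret;

		vdd = devm_regulator_get(dev, "vdd");	/* assumed supply name */
		if (IS_ERR(vdd))
			return PTR_ERR(vdd);

		ret = regulator_enable(vdd);	/* enables rdev->supply chain first */
		if (ret)
			return ret;

		/* balanced by regulator_disable(vdd) on the teardown path */
		return 0;
	}
--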
2160 static int _regulator_do_disable(struct regulator_dev *rdev) _regulator_do_disable() argument
2164 trace_regulator_disable(rdev_get_name(rdev)); _regulator_do_disable()
2166 if (rdev->ena_pin) { _regulator_do_disable()
2167 if (rdev->ena_gpio_state) { _regulator_do_disable()
2168 ret = regulator_ena_gpio_ctrl(rdev, false); _regulator_do_disable()
2171 rdev->ena_gpio_state = 0; _regulator_do_disable()
2174 } else if (rdev->desc->ops->disable) { _regulator_do_disable()
2175 ret = rdev->desc->ops->disable(rdev); _regulator_do_disable()
2183 if (rdev->desc->off_on_delay) _regulator_do_disable()
2184 rdev->last_off_jiffy = jiffies; _regulator_do_disable()
2186 trace_regulator_disable_complete(rdev_get_name(rdev)); _regulator_do_disable()
2192 static int _regulator_disable(struct regulator_dev *rdev) _regulator_disable() argument
2196 lockdep_assert_held_once(&rdev->mutex); _regulator_disable()
2198 if (WARN(rdev->use_count <= 0, _regulator_disable()
2199 "unbalanced disables for %s\n", rdev_get_name(rdev))) _regulator_disable()
2203 if (rdev->use_count == 1 && _regulator_disable()
2204 (rdev->constraints && !rdev->constraints->always_on)) { _regulator_disable()
2207 if (_regulator_can_change_status(rdev)) { _regulator_disable()
2208 ret = _notifier_call_chain(rdev, _regulator_disable()
2214 ret = _regulator_do_disable(rdev); _regulator_disable()
2216 rdev_err(rdev, "failed to disable\n"); _regulator_disable()
2217 _notifier_call_chain(rdev, _regulator_disable()
2222 _notifier_call_chain(rdev, REGULATOR_EVENT_DISABLE, _regulator_disable()
2226 rdev->use_count = 0; _regulator_disable()
2227 } else if (rdev->use_count > 1) { _regulator_disable()
2229 if (rdev->constraints && _regulator_disable()
2230 (rdev->constraints->valid_ops_mask & _regulator_disable()
2232 drms_uA_update(rdev); _regulator_disable()
2234 rdev->use_count--; _regulator_disable()
2254 struct regulator_dev *rdev = regulator->rdev; regulator_disable() local
2260 mutex_lock(&rdev->mutex); regulator_disable()
2261 ret = _regulator_disable(rdev); regulator_disable()
2262 mutex_unlock(&rdev->mutex); regulator_disable()
2264 if (ret == 0 && rdev->supply) regulator_disable()
2265 regulator_disable(rdev->supply); regulator_disable()
2272 static int _regulator_force_disable(struct regulator_dev *rdev) _regulator_force_disable() argument
2276 lockdep_assert_held_once(&rdev->mutex); _regulator_force_disable()
2278 ret = _notifier_call_chain(rdev, REGULATOR_EVENT_FORCE_DISABLE | _regulator_force_disable()
2283 ret = _regulator_do_disable(rdev); _regulator_force_disable()
2285 rdev_err(rdev, "failed to force disable\n"); _regulator_force_disable()
2286 _notifier_call_chain(rdev, REGULATOR_EVENT_FORCE_DISABLE | _regulator_force_disable()
2291 _notifier_call_chain(rdev, REGULATOR_EVENT_FORCE_DISABLE | _regulator_force_disable()
2308 struct regulator_dev *rdev = regulator->rdev; regulator_force_disable() local
2311 mutex_lock(&rdev->mutex); regulator_force_disable()
2313 ret = _regulator_force_disable(regulator->rdev); regulator_force_disable()
2314 mutex_unlock(&rdev->mutex); regulator_force_disable()
2316 if (rdev->supply) regulator_force_disable()
2317 while (rdev->open_count--) regulator_force_disable()
2318 regulator_disable(rdev->supply); regulator_force_disable()
2326 struct regulator_dev *rdev = container_of(work, struct regulator_dev, regulator_disable_work() local
2330 mutex_lock(&rdev->mutex); regulator_disable_work()
2332 BUG_ON(!rdev->deferred_disables); regulator_disable_work()
2334 count = rdev->deferred_disables; regulator_disable_work()
2335 rdev->deferred_disables = 0; regulator_disable_work()
2338 ret = _regulator_disable(rdev); regulator_disable_work()
2340 rdev_err(rdev, "Deferred disable failed: %d\n", ret); regulator_disable_work()
2343 mutex_unlock(&rdev->mutex); regulator_disable_work()
2345 if (rdev->supply) { regulator_disable_work()
2347 ret = regulator_disable(rdev->supply); regulator_disable_work()
2349 rdev_err(rdev, regulator_disable_work()
2370 struct regulator_dev *rdev = regulator->rdev; regulator_disable_deferred() local
2379 mutex_lock(&rdev->mutex); regulator_disable_deferred()
2380 rdev->deferred_disables++; regulator_disable_deferred()
2381 mutex_unlock(&rdev->mutex); regulator_disable_deferred()
2384 &rdev->disable_work, regulator_disable_deferred()
2393 static int _regulator_is_enabled(struct regulator_dev *rdev) _regulator_is_enabled() argument
2396 if (rdev->ena_pin) _regulator_is_enabled()
2397 return rdev->ena_gpio_state; _regulator_is_enabled()
2400 if (!rdev->desc->ops->is_enabled) _regulator_is_enabled()
2403 return rdev->desc->ops->is_enabled(rdev); _regulator_is_enabled()
2409 struct regulator_dev *rdev = regulator->rdev; _regulator_list_voltage() local
2410 const struct regulator_ops *ops = rdev->desc->ops; _regulator_list_voltage()
2413 if (rdev->desc->fixed_uV && rdev->desc->n_voltages == 1 && !selector) _regulator_list_voltage()
2414 return rdev->desc->fixed_uV; _regulator_list_voltage()
2417 if (selector >= rdev->desc->n_voltages) _regulator_list_voltage()
2420 mutex_lock(&rdev->mutex); _regulator_list_voltage()
2421 ret = ops->list_voltage(rdev, selector); _regulator_list_voltage()
2423 mutex_unlock(&rdev->mutex); _regulator_list_voltage()
2424 } else if (rdev->supply) { _regulator_list_voltage()
2425 ret = _regulator_list_voltage(rdev->supply, selector, lock); _regulator_list_voltage()
2431 if (ret < rdev->constraints->min_uV) _regulator_list_voltage()
2433 else if (ret > rdev->constraints->max_uV) _regulator_list_voltage()
2459 mutex_lock(&regulator->rdev->mutex); regulator_is_enabled()
2460 ret = _regulator_is_enabled(regulator->rdev); regulator_is_enabled()
2461 mutex_unlock(&regulator->rdev->mutex); regulator_is_enabled()
2478 struct regulator_dev *rdev = regulator->rdev; regulator_can_change_voltage() local
2480 if (rdev->constraints && regulator_can_change_voltage()
2481 (rdev->constraints->valid_ops_mask & REGULATOR_CHANGE_VOLTAGE)) { regulator_can_change_voltage()
2482 if (rdev->desc->n_voltages - rdev->desc->linear_min_sel > 1) regulator_can_change_voltage()
2485 if (rdev->desc->continuous_voltage_range && regulator_can_change_voltage()
2486 rdev->constraints->min_uV && rdev->constraints->max_uV && regulator_can_change_voltage()
2487 rdev->constraints->min_uV != rdev->constraints->max_uV) regulator_can_change_voltage()
2505 struct regulator_dev *rdev = regulator->rdev; regulator_count_voltages() local
2507 if (rdev->desc->n_voltages) regulator_count_voltages()
2508 return rdev->desc->n_voltages; regulator_count_voltages()
2510 if (!rdev->supply) regulator_count_voltages()
2513 return regulator_count_voltages(rdev->supply); regulator_count_voltages()
2542 struct regmap *map = regulator->rdev->regmap; regulator_get_regmap()
2565 struct regulator_dev *rdev = regulator->rdev; regulator_get_hardware_vsel_register() local
2566 const struct regulator_ops *ops = rdev->desc->ops; regulator_get_hardware_vsel_register()
2571 *vsel_reg = rdev->desc->vsel_reg; regulator_get_hardware_vsel_register()
2572 *vsel_mask = rdev->desc->vsel_mask; regulator_get_hardware_vsel_register()
2592 struct regulator_dev *rdev = regulator->rdev; regulator_list_hardware_vsel() local
2593 const struct regulator_ops *ops = rdev->desc->ops; regulator_list_hardware_vsel()
2595 if (selector >= rdev->desc->n_voltages) regulator_list_hardware_vsel()
2613 struct regulator_dev *rdev = regulator->rdev; regulator_get_linear_step() local
2615 return rdev->desc->uV_step; regulator_get_linear_step()
2631 struct regulator_dev *rdev = regulator->rdev; regulator_is_supported_voltage() local
2635 if (!(rdev->constraints->valid_ops_mask & REGULATOR_CHANGE_VOLTAGE)) { regulator_is_supported_voltage()
2644 if (rdev->desc->continuous_voltage_range) regulator_is_supported_voltage()
2645 return min_uV >= rdev->constraints->min_uV && regulator_is_supported_voltage()
2646 max_uV <= rdev->constraints->max_uV; regulator_is_supported_voltage()
2664 static int regulator_map_voltage(struct regulator_dev *rdev, int min_uV, regulator_map_voltage() argument
2667 const struct regulator_desc *desc = rdev->desc; regulator_map_voltage()
2670 return desc->ops->map_voltage(rdev, min_uV, max_uV); regulator_map_voltage()
2673 return regulator_map_voltage_linear(rdev, min_uV, max_uV); regulator_map_voltage()
2676 return regulator_map_voltage_linear_range(rdev, min_uV, max_uV); regulator_map_voltage()
2678 return regulator_map_voltage_iterate(rdev, min_uV, max_uV); regulator_map_voltage()
2681 static int _regulator_call_set_voltage(struct regulator_dev *rdev, _regulator_call_set_voltage() argument
2688 data.old_uV = _regulator_get_voltage(rdev); _regulator_call_set_voltage()
2691 ret = _notifier_call_chain(rdev, REGULATOR_EVENT_PRE_VOLTAGE_CHANGE, _regulator_call_set_voltage()
2696 ret = rdev->desc->ops->set_voltage(rdev, min_uV, max_uV, selector); _regulator_call_set_voltage()
2700 _notifier_call_chain(rdev, REGULATOR_EVENT_ABORT_VOLTAGE_CHANGE, _regulator_call_set_voltage()
2706 static int _regulator_call_set_voltage_sel(struct regulator_dev *rdev, _regulator_call_set_voltage_sel() argument
2712 data.old_uV = _regulator_get_voltage(rdev); _regulator_call_set_voltage_sel()
2715 ret = _notifier_call_chain(rdev, REGULATOR_EVENT_PRE_VOLTAGE_CHANGE, _regulator_call_set_voltage_sel()
2720 ret = rdev->desc->ops->set_voltage_sel(rdev, selector); _regulator_call_set_voltage_sel()
2724 _notifier_call_chain(rdev, REGULATOR_EVENT_ABORT_VOLTAGE_CHANGE, _regulator_call_set_voltage_sel()
2730 static int _regulator_do_set_voltage(struct regulator_dev *rdev, _regulator_do_set_voltage() argument
2739 trace_regulator_set_voltage(rdev_get_name(rdev), min_uV, max_uV); _regulator_do_set_voltage()
2741 min_uV += rdev->constraints->uV_offset; _regulator_do_set_voltage()
2742 max_uV += rdev->constraints->uV_offset; _regulator_do_set_voltage()
2748 if (_regulator_is_enabled(rdev) && _regulator_do_set_voltage()
2749 rdev->desc->ops->set_voltage_time_sel && _regulator_do_set_voltage()
2750 rdev->desc->ops->get_voltage_sel) { _regulator_do_set_voltage()
2751 old_selector = rdev->desc->ops->get_voltage_sel(rdev); _regulator_do_set_voltage()
2756 if (rdev->desc->ops->set_voltage) { _regulator_do_set_voltage()
2757 ret = _regulator_call_set_voltage(rdev, min_uV, max_uV, _regulator_do_set_voltage()
2761 if (rdev->desc->ops->list_voltage) _regulator_do_set_voltage()
2762 best_val = rdev->desc->ops->list_voltage(rdev, _regulator_do_set_voltage()
2765 best_val = _regulator_get_voltage(rdev); _regulator_do_set_voltage()
2768 } else if (rdev->desc->ops->set_voltage_sel) { _regulator_do_set_voltage()
2769 ret = regulator_map_voltage(rdev, min_uV, max_uV); _regulator_do_set_voltage()
2771 best_val = rdev->desc->ops->list_voltage(rdev, ret); _regulator_do_set_voltage()
2778 rdev, best_val, selector); _regulator_do_set_voltage()
2788 if (ret == 0 && !rdev->constraints->ramp_disable && old_selector >= 0 _regulator_do_set_voltage()
2791 delay = rdev->desc->ops->set_voltage_time_sel(rdev, _regulator_do_set_voltage()
2794 rdev_warn(rdev, "set_voltage_time_sel() failed: %d\n", _regulator_do_set_voltage()
2811 _notifier_call_chain(rdev, REGULATOR_EVENT_VOLTAGE_CHANGE, _regulator_do_set_voltage()
2815 trace_regulator_set_voltage_complete(rdev_get_name(rdev), best_val); _regulator_do_set_voltage()
2823 struct regulator_dev *rdev = regulator->rdev; regulator_set_voltage_unlocked() local
2841 if (!(rdev->constraints->valid_ops_mask & REGULATOR_CHANGE_VOLTAGE)) { regulator_set_voltage_unlocked()
2842 current_uV = _regulator_get_voltage(rdev); regulator_set_voltage_unlocked()
2851 if (!rdev->desc->ops->set_voltage && regulator_set_voltage_unlocked()
2852 !rdev->desc->ops->set_voltage_sel) { regulator_set_voltage_unlocked()
2858 ret = regulator_check_voltage(rdev, &min_uV, &max_uV); regulator_set_voltage_unlocked()
2868 ret = regulator_check_consumers(rdev, &min_uV, &max_uV); regulator_set_voltage_unlocked()
2872 if (rdev->supply && (rdev->desc->min_dropout_uV || regulator_set_voltage_unlocked()
2873 !rdev->desc->ops->get_voltage)) { regulator_set_voltage_unlocked()
2877 selector = regulator_map_voltage(rdev, min_uV, max_uV); regulator_set_voltage_unlocked()
2889 best_supply_uV += rdev->desc->min_dropout_uV; regulator_set_voltage_unlocked()
2891 current_supply_uV = _regulator_get_voltage(rdev->supply->rdev); regulator_set_voltage_unlocked()
2901 ret = regulator_set_voltage_unlocked(rdev->supply, regulator_set_voltage_unlocked()
2904 dev_err(&rdev->dev, "Failed to increase supply voltage: %d\n", regulator_set_voltage_unlocked()
2910 ret = _regulator_do_set_voltage(rdev, min_uV, max_uV); regulator_set_voltage_unlocked()
2915 ret = regulator_set_voltage_unlocked(rdev->supply, regulator_set_voltage_unlocked()
2918 dev_warn(&rdev->dev, "Failed to decrease supply voltage: %d\n", regulator_set_voltage_unlocked()
2955 regulator_lock_supply(regulator->rdev); regulator_set_voltage()
2959 regulator_unlock_supply(regulator->rdev); regulator_set_voltage()
2978 struct regulator_dev *rdev = regulator->rdev; regulator_set_voltage_time() local
2979 const struct regulator_ops *ops = rdev->desc->ops; regulator_set_voltage_time()
2987 || !rdev->desc->n_voltages) regulator_set_voltage_time()
2990 for (i = 0; i < rdev->desc->n_voltages; i++) { regulator_set_voltage_time()
3006 return ops->set_voltage_time_sel(rdev, old_sel, new_sel); regulator_set_voltage_time()
3012 * @rdev: regulator source device
3022 int regulator_set_voltage_time_sel(struct regulator_dev *rdev, regulator_set_voltage_time_sel() argument
3029 if (rdev->constraints->ramp_delay) regulator_set_voltage_time_sel()
3030 ramp_delay = rdev->constraints->ramp_delay; regulator_set_voltage_time_sel()
3031 else if (rdev->desc->ramp_delay) regulator_set_voltage_time_sel()
3032 ramp_delay = rdev->desc->ramp_delay; regulator_set_voltage_time_sel()
3035 rdev_warn(rdev, "ramp_delay not set\n"); regulator_set_voltage_time_sel()
3040 if (!rdev->desc->ops->list_voltage) regulator_set_voltage_time_sel()
3043 old_volt = rdev->desc->ops->list_voltage(rdev, old_selector); regulator_set_voltage_time_sel()
3044 new_volt = rdev->desc->ops->list_voltage(rdev, new_selector); regulator_set_voltage_time_sel()
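--
The helper divides the voltage delta by the ramp rate. A worked example with assumed numbers (none appear in the listing): ramp_delay = 12500 uV/us, old_volt = 1100000 uV, new_volt = 1200000 uV.

	/* time = DIV_ROUND_UP(abs(1200000 - 1100000), 12500)
	 *      = DIV_ROUND_UP(100000, 12500) = 8 us
	 * so the core waits ~8 us after the selector write before it
	 * reports the new voltage as settled.
	 */
--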
3060 struct regulator_dev *rdev = regulator->rdev; regulator_sync_voltage() local
3063 mutex_lock(&rdev->mutex); regulator_sync_voltage()
3065 if (!rdev->desc->ops->set_voltage && regulator_sync_voltage()
3066 !rdev->desc->ops->set_voltage_sel) { regulator_sync_voltage()
3081 ret = regulator_check_voltage(rdev, &min_uV, &max_uV); regulator_sync_voltage()
3085 ret = regulator_check_consumers(rdev, &min_uV, &max_uV); regulator_sync_voltage()
3089 ret = _regulator_do_set_voltage(rdev, min_uV, max_uV); regulator_sync_voltage()
3092 mutex_unlock(&rdev->mutex); regulator_sync_voltage()
3097 static int _regulator_get_voltage(struct regulator_dev *rdev) _regulator_get_voltage() argument
3101 if (rdev->desc->ops->get_voltage_sel) { _regulator_get_voltage()
3102 sel = rdev->desc->ops->get_voltage_sel(rdev); _regulator_get_voltage()
3105 ret = rdev->desc->ops->list_voltage(rdev, sel); _regulator_get_voltage()
3106 } else if (rdev->desc->ops->get_voltage) { _regulator_get_voltage()
3107 ret = rdev->desc->ops->get_voltage(rdev); _regulator_get_voltage()
3108 } else if (rdev->desc->ops->list_voltage) { _regulator_get_voltage()
3109 ret = rdev->desc->ops->list_voltage(rdev, 0); _regulator_get_voltage()
3110 } else if (rdev->desc->fixed_uV && (rdev->desc->n_voltages == 1)) { _regulator_get_voltage()
3111 ret = rdev->desc->fixed_uV; _regulator_get_voltage()
3112 } else if (rdev->supply) { _regulator_get_voltage()
3113 ret = _regulator_get_voltage(rdev->supply->rdev); _regulator_get_voltage()
3120 return ret - rdev->constraints->uV_offset; _regulator_get_voltage()
3136 regulator_lock_supply(regulator->rdev); regulator_get_voltage()
3138 ret = _regulator_get_voltage(regulator->rdev); regulator_get_voltage()
3140 regulator_unlock_supply(regulator->rdev); regulator_get_voltage()
3165 struct regulator_dev *rdev = regulator->rdev; regulator_set_current_limit() local
3168 mutex_lock(&rdev->mutex); regulator_set_current_limit()
3171 if (!rdev->desc->ops->set_current_limit) { regulator_set_current_limit()
3177 ret = regulator_check_current_limit(rdev, &min_uA, &max_uA); regulator_set_current_limit()
3181 ret = rdev->desc->ops->set_current_limit(rdev, min_uA, max_uA); regulator_set_current_limit()
3183 mutex_unlock(&rdev->mutex); regulator_set_current_limit()
3188 static int _regulator_get_current_limit(struct regulator_dev *rdev) _regulator_get_current_limit() argument
3192 mutex_lock(&rdev->mutex); _regulator_get_current_limit()
3195 if (!rdev->desc->ops->get_current_limit) { _regulator_get_current_limit()
3200 ret = rdev->desc->ops->get_current_limit(rdev); _regulator_get_current_limit()
3202 mutex_unlock(&rdev->mutex); _regulator_get_current_limit()
3217 return _regulator_get_current_limit(regulator->rdev); regulator_get_current_limit()
3234 struct regulator_dev *rdev = regulator->rdev; regulator_set_mode() local
3238 mutex_lock(&rdev->mutex); regulator_set_mode()
3241 if (!rdev->desc->ops->set_mode) { regulator_set_mode()
3247 if (rdev->desc->ops->get_mode) { regulator_set_mode()
3248 regulator_curr_mode = rdev->desc->ops->get_mode(rdev); regulator_set_mode()
3256 ret = regulator_mode_constrain(rdev, &mode); regulator_set_mode()
3260 ret = rdev->desc->ops->set_mode(rdev, mode); regulator_set_mode()
3262 mutex_unlock(&rdev->mutex); regulator_set_mode()
3267 static unsigned int _regulator_get_mode(struct regulator_dev *rdev) _regulator_get_mode() argument
3271 mutex_lock(&rdev->mutex); _regulator_get_mode()
3274 if (!rdev->desc->ops->get_mode) { _regulator_get_mode()
3279 ret = rdev->desc->ops->get_mode(rdev); _regulator_get_mode()
3281 mutex_unlock(&rdev->mutex); _regulator_get_mode()
3293 return _regulator_get_mode(regulator->rdev); regulator_get_mode()
3325 struct regulator_dev *rdev = regulator->rdev; regulator_set_load() local
3328 mutex_lock(&rdev->mutex); regulator_set_load()
3330 ret = drms_uA_update(rdev); regulator_set_load()
3331 mutex_unlock(&rdev->mutex); regulator_set_load()
3350 struct regulator_dev *rdev = regulator->rdev; regulator_allow_bypass() local
3353 if (!rdev->desc->ops->set_bypass) regulator_allow_bypass()
3356 if (rdev->constraints && regulator_allow_bypass()
3357 !(rdev->constraints->valid_ops_mask & REGULATOR_CHANGE_BYPASS)) regulator_allow_bypass()
3360 mutex_lock(&rdev->mutex); regulator_allow_bypass()
3363 rdev->bypass_count++; regulator_allow_bypass()
3365 if (rdev->bypass_count == rdev->open_count) { regulator_allow_bypass()
3366 ret = rdev->desc->ops->set_bypass(rdev, enable); regulator_allow_bypass()
3368 rdev->bypass_count--; regulator_allow_bypass()
3372 rdev->bypass_count--; regulator_allow_bypass()
3374 if (rdev->bypass_count != rdev->open_count) { regulator_allow_bypass()
3375 ret = rdev->desc->ops->set_bypass(rdev, enable); regulator_allow_bypass()
3377 rdev->bypass_count++; regulator_allow_bypass()
3384 mutex_unlock(&rdev->mutex); regulator_allow_bypass()
3400 return blocking_notifier_chain_register(&regulator->rdev->notifier, regulator_register_notifier()
3415 return blocking_notifier_chain_unregister(&regulator->rdev->notifier, regulator_unregister_notifier()
3423 static int _notifier_call_chain(struct regulator_dev *rdev, _notifier_call_chain() argument
3426 /* call rdev chain first */ _notifier_call_chain()
3427 return blocking_notifier_call_chain(&rdev->notifier, event, data); _notifier_call_chain()
3633 * @rdev: regulator source
3641 int regulator_notifier_call_chain(struct regulator_dev *rdev, regulator_notifier_call_chain() argument
3644 lockdep_assert_held_once(&rdev->mutex); regulator_notifier_call_chain()
3646 _notifier_call_chain(rdev, event, data); regulator_notifier_call_chain()
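--
Consumers hook these chains through regulator_register_notifier(). A minimal sketch (hypothetical callback and handle names):

	#include <linux/kernel.h>
	#include <linux/notifier.h>
	#include <linux/regulator/consumer.h>

	/* Illustrative notifier: trace voltage-change events on one supply. */
	static int my_reg_event(struct notifier_block *nb, unsigned long event,
				void *data)
	{
		if (event & REGULATOR_EVENT_VOLTAGE_CHANGE)
			pr_debug("supply voltage changed\n");
		return NOTIFY_OK;
	}

	static struct notifier_block my_reg_nb = {
		.notifier_call = my_reg_event,
	};

	/* after obtaining 'my_reg' (hypothetical consumer handle):
	 *	regulator_register_notifier(my_reg, &my_reg_nb);
	 */
--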
3711 struct regulator_dev *rdev = container_of(dev, struct regulator_dev, dev); regulator_attr_is_visible() local
3712 const struct regulator_ops *ops = rdev->desc->ops; regulator_attr_is_visible()
3723 if ((ops->get_voltage && ops->get_voltage(rdev) >= 0) || regulator_attr_is_visible()
3724 (ops->get_voltage_sel && ops->get_voltage_sel(rdev) >= 0) || regulator_attr_is_visible()
3725 (ops->list_voltage && ops->list_voltage(rdev, 0) >= 0) || regulator_attr_is_visible()
3726 (rdev->desc->fixed_uV && rdev->desc->n_voltages == 1)) regulator_attr_is_visible()
3738 return (rdev->ena_pin || ops->is_enabled) ? mode : 0; regulator_attr_is_visible()
3748 return rdev->desc->type == REGULATOR_CURRENT ? mode : 0; regulator_attr_is_visible()
3789 struct regulator_dev *rdev = dev_get_drvdata(dev); regulator_dev_release() local
3791 kfree(rdev->constraints); regulator_dev_release()
3792 of_node_put(rdev->dev.of_node); regulator_dev_release()
3793 kfree(rdev); regulator_dev_release()
3802 static void rdev_init_debugfs(struct regulator_dev *rdev) rdev_init_debugfs() argument
3804 struct device *parent = rdev->dev.parent; rdev_init_debugfs()
3805 const char *rname = rdev_get_name(rdev); rdev_init_debugfs()
3809 if (parent && rname == rdev->desc->name) { rdev_init_debugfs()
3815 rdev->debugfs = debugfs_create_dir(rname, debugfs_root); rdev_init_debugfs()
3816 if (!rdev->debugfs) { rdev_init_debugfs()
3817 rdev_warn(rdev, "Failed to create debugfs directory\n"); rdev_init_debugfs()
3821 debugfs_create_u32("use_count", 0444, rdev->debugfs, rdev_init_debugfs()
3822 &rdev->use_count); rdev_init_debugfs()
3823 debugfs_create_u32("open_count", 0444, rdev->debugfs, rdev_init_debugfs()
3824 &rdev->open_count); rdev_init_debugfs()
3825 debugfs_create_u32("bypass_count", 0444, rdev->debugfs, rdev_init_debugfs()
3826 &rdev->bypass_count); rdev_init_debugfs()
3846 struct regulator_dev *rdev; regulator_register() local
3879 rdev = kzalloc(sizeof(struct regulator_dev), GFP_KERNEL); regulator_register()
3880 if (rdev == NULL) regulator_register()
3889 kfree(rdev); regulator_register()
3894 &rdev->dev.of_node); regulator_register()
3897 rdev->dev.of_node = of_node_get(config->of_node); regulator_register()
3902 mutex_init(&rdev->mutex); regulator_register()
3903 rdev->reg_data = config->driver_data; regulator_register()
3904 rdev->owner = regulator_desc->owner; regulator_register()
3905 rdev->desc = regulator_desc; regulator_register()
3907 rdev->regmap = config->regmap; regulator_register()
3909 rdev->regmap = dev_get_regmap(dev, NULL); regulator_register()
3911 rdev->regmap = dev_get_regmap(dev->parent, NULL); regulator_register()
3912 INIT_LIST_HEAD(&rdev->consumer_list); regulator_register()
3913 INIT_LIST_HEAD(&rdev->list); regulator_register()
3914 BLOCKING_INIT_NOTIFIER_HEAD(&rdev->notifier); regulator_register()
3915 INIT_DELAYED_WORK(&rdev->disable_work, regulator_disable_work); regulator_register()
3919 ret = init_data->regulator_init(rdev->reg_data); regulator_register()
3925 rdev->dev.class = &regulator_class; regulator_register()
3926 rdev->dev.parent = dev; regulator_register()
3927 dev_set_name(&rdev->dev, "regulator.%lu", regulator_register()
3929 ret = device_register(&rdev->dev); regulator_register()
3931 put_device(&rdev->dev); regulator_register()
3935 dev_set_drvdata(&rdev->dev, rdev); regulator_register()
3939 ret = regulator_ena_gpio_request(rdev, config); regulator_register()
3941 rdev_err(rdev, "Failed to request enable GPIO%d: %d\n", regulator_register()
3951 ret = set_machine_constraints(rdev, constraints); regulator_register()
3956 rdev->supply_name = init_data->supply_regulator; regulator_register()
3958 rdev->supply_name = regulator_desc->supply_name; regulator_register()
3963 ret = set_consumer_device_supply(rdev, regulator_register()
3974 rdev_init_debugfs(rdev); regulator_register()
3978 return rdev; regulator_register()
3981 unset_regulator_supplies(rdev); regulator_register()
3984 regulator_ena_gpio_free(rdev); regulator_register()
3985 kfree(rdev->constraints); regulator_register()
3987 device_unregister(&rdev->dev); regulator_register()
3988 /* device core frees rdev */ regulator_register()
3989 rdev = ERR_PTR(ret); regulator_register()
3993 kfree(rdev); regulator_register()
3994 rdev = ERR_PTR(ret); regulator_register()
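--
On the driver side, the fragments above are reached through regulator_register() or its devm_ wrapper. A hedged probe sketch reusing the hypothetical descriptor from the earlier helpers.c sketch:

	#include <linux/err.h>
	#include <linux/platform_device.h>
	#include <linux/regmap.h>
	#include <linux/regulator/driver.h>

	static int my_pmic_probe(struct platform_device *pdev)	/* illustrative */
	{
		struct regulator_config config = { };
		struct regulator_dev *rdev;

		config.dev = &pdev->dev;
		config.regmap = dev_get_regmap(pdev->dev.parent, NULL);

		/* my_ldo_desc is the hypothetical descriptor sketched earlier */
		rdev = devm_regulator_register(&pdev->dev, &my_ldo_desc, &config);
		return PTR_ERR_OR_ZERO(rdev);
	}
--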
4001 * @rdev: regulator to unregister
4005 void regulator_unregister(struct regulator_dev *rdev) regulator_unregister() argument
4007 if (rdev == NULL) regulator_unregister()
4010 if (rdev->supply) { regulator_unregister()
4011 while (rdev->use_count--) regulator_unregister()
4012 regulator_disable(rdev->supply); regulator_unregister()
4013 regulator_put(rdev->supply); regulator_unregister()
4016 debugfs_remove_recursive(rdev->debugfs); regulator_unregister()
4017 flush_work(&rdev->disable_work.work); regulator_unregister()
4018 WARN_ON(rdev->open_count); regulator_unregister()
4019 unset_regulator_supplies(rdev); regulator_unregister()
4020 list_del(&rdev->list); regulator_unregister()
4022 regulator_ena_gpio_free(rdev); regulator_unregister()
4023 device_unregister(&rdev->dev); regulator_unregister()
4029 struct regulator_dev *rdev = dev_to_rdev(dev); _regulator_suspend_prepare() local
4033 mutex_lock(&rdev->mutex); _regulator_suspend_prepare()
4034 ret = suspend_prepare(rdev, *state); _regulator_suspend_prepare()
4035 mutex_unlock(&rdev->mutex); _regulator_suspend_prepare()
4060 struct regulator_dev *rdev = dev_to_rdev(dev); _regulator_suspend_finish() local
4063 mutex_lock(&rdev->mutex); _regulator_suspend_finish()
4064 if (rdev->use_count > 0 || rdev->constraints->always_on) { _regulator_suspend_finish()
4065 if (!_regulator_is_enabled(rdev)) { _regulator_suspend_finish()
4066 ret = _regulator_do_enable(rdev); _regulator_suspend_finish()
4075 if (!_regulator_is_enabled(rdev)) _regulator_suspend_finish()
4078 ret = _regulator_do_disable(rdev); _regulator_suspend_finish()
4083 mutex_unlock(&rdev->mutex); _regulator_suspend_finish()
4120 * rdev_get_drvdata - get rdev regulator driver data
4121 * @rdev: regulator
4123 * Get rdev regulator driver private data. This call can be used in the
4126 void *rdev_get_drvdata(struct regulator_dev *rdev) rdev_get_drvdata() argument
4128 return rdev->reg_data; rdev_get_drvdata()
4141 return regulator->rdev->reg_data; regulator_get_drvdata()
4152 regulator->rdev->reg_data = data; regulator_set_drvdata()
4158 * @rdev: regulator
4160 int rdev_get_id(struct regulator_dev *rdev) rdev_get_id() argument
4162 return rdev->desc->id; rdev_get_id()
4166 struct device *rdev_get_dev(struct regulator_dev *rdev) rdev_get_dev() argument
4168 return &rdev->dev; rdev_get_dev()
4225 struct regulator_dev *rdev,
4230 struct regulator_dev *rdev = dev_to_rdev(dev); regulator_summary_show_children() local
4233 if (rdev->supply && rdev->supply->rdev == summary_data->parent) regulator_summary_show_children()
4234 regulator_summary_show_subtree(summary_data->s, rdev, regulator_summary_show_children()
4241 struct regulator_dev *rdev, regulator_summary_show_subtree()
4248 if (!rdev) regulator_summary_show_subtree()
4253 30 - level * 3, rdev_get_name(rdev), regulator_summary_show_subtree()
4254 rdev->use_count, rdev->open_count, rdev->bypass_count); regulator_summary_show_subtree()
4256 seq_printf(s, "%5dmV ", _regulator_get_voltage(rdev) / 1000); regulator_summary_show_subtree()
4257 seq_printf(s, "%5dmA ", _regulator_get_current_limit(rdev) / 1000); regulator_summary_show_subtree()
4259 c = rdev->constraints; regulator_summary_show_subtree()
4261 switch (rdev->desc->type) { regulator_summary_show_subtree()
4275 list_for_each_entry(consumer, &rdev->consumer_list, list) { regulator_summary_show_subtree()
4283 switch (rdev->desc->type) { regulator_summary_show_subtree()
4298 summary_data.parent = rdev; regulator_summary_show_subtree()
4306 struct regulator_dev *rdev = dev_to_rdev(dev); regulator_summary_show_roots() local
4309 if (!rdev->supply) regulator_summary_show_roots()
4310 regulator_summary_show_subtree(s, rdev, 0); regulator_summary_show_roots()
4367 struct regulator_dev *rdev = dev_to_rdev(dev); regulator_late_cleanup() local
4368 const struct regulator_ops *ops = rdev->desc->ops; regulator_late_cleanup()
4369 struct regulation_constraints *c = rdev->constraints; regulator_late_cleanup()
4378 mutex_lock(&rdev->mutex); regulator_late_cleanup()
4380 if (rdev->use_count) regulator_late_cleanup()
4385 enabled = ops->is_enabled(rdev); regulator_late_cleanup()
4395 rdev_info(rdev, "disabling\n"); regulator_late_cleanup()
4396 ret = _regulator_do_disable(rdev); regulator_late_cleanup()
4398 rdev_err(rdev, "couldn't disable: %d\n", ret); regulator_late_cleanup()
4405 rdev_warn(rdev, "incomplete constraints, leaving on\n"); regulator_late_cleanup()
4409 mutex_unlock(&rdev->mutex); regulator_late_cleanup()
4240 regulator_summary_show_subtree(struct seq_file *s, struct regulator_dev *rdev, int level) regulator_summary_show_subtree() argument
H A Dmc13xxx-regulator-core.c
31 static int mc13xxx_regulator_enable(struct regulator_dev *rdev) mc13xxx_regulator_enable() argument
33 struct mc13xxx_regulator_priv *priv = rdev_get_drvdata(rdev); mc13xxx_regulator_enable()
35 int id = rdev_get_id(rdev); mc13xxx_regulator_enable()
37 dev_dbg(rdev_get_dev(rdev), "%s id: %d\n", __func__, id); mc13xxx_regulator_enable()
44 static int mc13xxx_regulator_disable(struct regulator_dev *rdev) mc13xxx_regulator_disable() argument
46 struct mc13xxx_regulator_priv *priv = rdev_get_drvdata(rdev); mc13xxx_regulator_disable()
48 int id = rdev_get_id(rdev); mc13xxx_regulator_disable()
50 dev_dbg(rdev_get_dev(rdev), "%s id: %d\n", __func__, id); mc13xxx_regulator_disable()
56 static int mc13xxx_regulator_is_enabled(struct regulator_dev *rdev) mc13xxx_regulator_is_enabled() argument
58 struct mc13xxx_regulator_priv *priv = rdev_get_drvdata(rdev); mc13xxx_regulator_is_enabled()
60 int ret, id = rdev_get_id(rdev); mc13xxx_regulator_is_enabled()
70 static int mc13xxx_regulator_set_voltage_sel(struct regulator_dev *rdev, mc13xxx_regulator_set_voltage_sel() argument
73 struct mc13xxx_regulator_priv *priv = rdev_get_drvdata(rdev); mc13xxx_regulator_set_voltage_sel()
75 int id = rdev_get_id(rdev); mc13xxx_regulator_set_voltage_sel()
82 static int mc13xxx_regulator_get_voltage(struct regulator_dev *rdev) mc13xxx_regulator_get_voltage() argument
84 struct mc13xxx_regulator_priv *priv = rdev_get_drvdata(rdev); mc13xxx_regulator_get_voltage()
86 int ret, id = rdev_get_id(rdev); mc13xxx_regulator_get_voltage()
89 dev_dbg(rdev_get_dev(rdev), "%s id: %d\n", __func__, id); mc13xxx_regulator_get_voltage()
99 dev_dbg(rdev_get_dev(rdev), "%s id: %d val: %d\n", __func__, id, val); mc13xxx_regulator_get_voltage()
103 return rdev->desc->volt_table[val]; mc13xxx_regulator_get_voltage()
116 int mc13xxx_fixed_regulator_set_voltage(struct regulator_dev *rdev, int min_uV, mc13xxx_fixed_regulator_set_voltage() argument
119 int id = rdev_get_id(rdev); mc13xxx_fixed_regulator_set_voltage()
121 dev_dbg(rdev_get_dev(rdev), "%s id: %d min_uV: %d max_uV: %d\n", mc13xxx_fixed_regulator_set_voltage()
124 if (min_uV <= rdev->desc->volt_table[0] && mc13xxx_fixed_regulator_set_voltage()
125 rdev->desc->volt_table[0] <= max_uV) { mc13xxx_fixed_regulator_set_voltage()
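The mc13xxx fixed-regulator hit above ends mid-condition, but the pattern is complete: a single-voltage regulator accepts a request only when its one table entry falls inside [min_uV, max_uV]. A sketch of that acceptance test with illustrative names:

#include <linux/errno.h>
#include <linux/regulator/driver.h>

static int fixed_set_voltage(struct regulator_dev *rdev, int min_uV,
                             int max_uV, unsigned *selector)
{
        int uV = rdev->desc->volt_table[0];     /* the only supported level */

        if (min_uV > uV || uV > max_uV)
                return -EINVAL;         /* window excludes the fixed level */

        *selector = 0;                  /* single-entry table */
        return 0;
}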
H A Dmax77802.c101 * @rdev: regulator to mark as disabled
106 static int max77802_set_suspend_disable(struct regulator_dev *rdev) max77802_set_suspend_disable() argument
109 struct max77802_regulator_prv *max77802 = rdev_get_drvdata(rdev); max77802_set_suspend_disable()
110 int id = rdev_get_id(rdev); max77802_set_suspend_disable()
114 return regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, max77802_set_suspend_disable()
115 rdev->desc->enable_mask, val << shift); max77802_set_suspend_disable()
123 static int max77802_set_mode(struct regulator_dev *rdev, unsigned int mode) max77802_set_mode() argument
125 struct max77802_regulator_prv *max77802 = rdev_get_drvdata(rdev); max77802_set_mode()
126 int id = rdev_get_id(rdev); max77802_set_mode()
138 dev_warn(&rdev->dev, "%s: regulator mode: 0x%x not supported\n", max77802_set_mode()
139 rdev->desc->name, mode); max77802_set_mode()
144 return regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, max77802_set_mode()
145 rdev->desc->enable_mask, val << shift); max77802_set_mode()
148 static unsigned max77802_get_mode(struct regulator_dev *rdev) max77802_get_mode() argument
150 struct max77802_regulator_prv *max77802 = rdev_get_drvdata(rdev); max77802_get_mode()
151 int id = rdev_get_id(rdev); max77802_get_mode()
158 * @rdev: regulator to change mode
172 static int max77802_set_suspend_mode(struct regulator_dev *rdev, max77802_set_suspend_mode() argument
175 struct max77802_regulator_prv *max77802 = rdev_get_drvdata(rdev); max77802_set_suspend_mode()
176 int id = rdev_get_id(rdev); max77802_set_suspend_mode()
185 dev_warn(&rdev->dev, "%s: is disabled, mode: 0x%x not set\n", max77802_set_suspend_mode()
186 rdev->desc->name, mode); max77802_set_suspend_mode()
209 dev_warn(&rdev->dev, "%s: in Low Power: 0x%x invalid\n", max77802_set_suspend_mode()
210 rdev->desc->name, mode); max77802_set_suspend_mode()
213 dev_warn(&rdev->dev, "%s: regulator mode: 0x%x not supported\n", max77802_set_suspend_mode()
214 rdev->desc->name, mode); max77802_set_suspend_mode()
218 return regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, max77802_set_suspend_mode()
219 rdev->desc->enable_mask, val << shift); max77802_set_suspend_mode()
222 static int max77802_enable(struct regulator_dev *rdev) max77802_enable() argument
224 struct max77802_regulator_prv *max77802 = rdev_get_drvdata(rdev); max77802_enable()
225 int id = rdev_get_id(rdev); max77802_enable()
231 return regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, max77802_enable()
232 rdev->desc->enable_mask, max77802_enable()
236 static int max77802_find_ramp_value(struct regulator_dev *rdev, max77802_find_ramp_value() argument
248 dev_warn(&rdev->dev, "%s: ramp_delay: %d not supported, setting 100000\n", max77802_find_ramp_value()
249 rdev->desc->name, ramp_delay); max77802_find_ramp_value()
254 static int max77802_set_ramp_delay_2bit(struct regulator_dev *rdev, max77802_set_ramp_delay_2bit() argument
257 int id = rdev_get_id(rdev); max77802_set_ramp_delay_2bit()
261 dev_warn(&rdev->dev, max77802_set_ramp_delay_2bit()
263 rdev->desc->name); max77802_set_ramp_delay_2bit()
266 ramp_value = max77802_find_ramp_value(rdev, ramp_table_77802_2bit, max77802_set_ramp_delay_2bit()
269 return regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, max77802_set_ramp_delay_2bit()
275 static int max77802_set_ramp_delay_4bit(struct regulator_dev *rdev, max77802_set_ramp_delay_4bit() argument
280 ramp_value = max77802_find_ramp_value(rdev, ramp_table_77802_4bit, max77802_set_ramp_delay_4bit()
283 return regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, max77802_set_ramp_delay_4bit()
555 struct regulator_dev *rdev; max77802_pmic_probe() local
579 rdev = devm_regulator_register(&pdev->dev, max77802_pmic_probe()
581 if (IS_ERR(rdev)) { max77802_pmic_probe()
582 ret = PTR_ERR(rdev); max77802_pmic_probe()
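Nearly every max77802 callback above funnels into the same write: mask a per-regulator opmode value into desc->enable_reg with regmap_update_bits(). A sketch of that shared tail; MY_OPMODE_OFF and the shift helper are assumptions (the real driver derives the shift from the regulator id):

#include <linux/bitops.h>
#include <linux/regmap.h>
#include <linux/regulator/driver.h>

#define MY_OPMODE_OFF   0x0     /* hypothetical "off" opmode encoding */

/* hypothetical: field position recovered from the descriptor's mask */
static inline unsigned int my_enable_shift(struct regulator_dev *rdev)
{
        return ffs(rdev->desc->enable_mask) - 1;
}

static int my_set_suspend_disable(struct regulator_dev *rdev)
{
        return regmap_update_bits(rdev->regmap, rdev->desc->enable_reg,
                                  rdev->desc->enable_mask,
                                  MY_OPMODE_OFF << my_enable_shift(rdev));
}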
H A Dab8500-ext.c32 * @rdev: regulator device
48 struct regulator_dev *rdev; member in struct:ab8500_ext_regulator_info
59 static int ab8500_ext_regulator_enable(struct regulator_dev *rdev) ab8500_ext_regulator_enable() argument
62 struct ab8500_ext_regulator_info *info = rdev_get_drvdata(rdev); ab8500_ext_regulator_enable()
66 dev_err(rdev_get_dev(rdev), "regulator info null pointer\n"); ab8500_ext_regulator_enable()
83 dev_err(rdev_get_dev(info->rdev), ab8500_ext_regulator_enable()
88 dev_dbg(rdev_get_dev(rdev), ab8500_ext_regulator_enable()
96 static int ab8500_ext_regulator_disable(struct regulator_dev *rdev) ab8500_ext_regulator_disable() argument
99 struct ab8500_ext_regulator_info *info = rdev_get_drvdata(rdev); ab8500_ext_regulator_disable()
103 dev_err(rdev_get_dev(rdev), "regulator info null pointer\n"); ab8500_ext_regulator_disable()
119 dev_err(rdev_get_dev(info->rdev), ab8500_ext_regulator_disable()
124 dev_dbg(rdev_get_dev(rdev), "%s-disable (bank, reg, mask, value):" ab8500_ext_regulator_disable()
132 static int ab8500_ext_regulator_is_enabled(struct regulator_dev *rdev) ab8500_ext_regulator_is_enabled() argument
135 struct ab8500_ext_regulator_info *info = rdev_get_drvdata(rdev); ab8500_ext_regulator_is_enabled()
139 dev_err(rdev_get_dev(rdev), "regulator info null pointer\n"); ab8500_ext_regulator_is_enabled()
146 dev_err(rdev_get_dev(rdev), ab8500_ext_regulator_is_enabled()
151 dev_dbg(rdev_get_dev(rdev), "%s-is_enabled (bank, reg, mask, value):" ab8500_ext_regulator_is_enabled()
163 static int ab8500_ext_regulator_set_mode(struct regulator_dev *rdev, ab8500_ext_regulator_set_mode() argument
167 struct ab8500_ext_regulator_info *info = rdev_get_drvdata(rdev); ab8500_ext_regulator_set_mode()
171 dev_err(rdev_get_dev(rdev), "regulator info null pointer\n"); ab8500_ext_regulator_set_mode()
191 if (ab8500_ext_regulator_is_enabled(rdev) && ab8500_ext_regulator_set_mode()
197 dev_err(rdev_get_dev(rdev), ab8500_ext_regulator_set_mode()
202 dev_dbg(rdev_get_dev(rdev), ab8500_ext_regulator_set_mode()
214 static unsigned int ab8500_ext_regulator_get_mode(struct regulator_dev *rdev) ab8500_ext_regulator_get_mode() argument
216 struct ab8500_ext_regulator_info *info = rdev_get_drvdata(rdev); ab8500_ext_regulator_get_mode()
220 dev_err(rdev_get_dev(rdev), "regulator info null pointer\n"); ab8500_ext_regulator_get_mode()
234 static int ab8500_ext_set_voltage(struct regulator_dev *rdev, int min_uV, ab8500_ext_set_voltage() argument
237 struct regulation_constraints *regu_constraints = rdev->constraints; ab8500_ext_set_voltage()
240 dev_err(rdev_get_dev(rdev), "No regulator constraints\n"); ab8500_ext_set_voltage()
248 dev_err(rdev_get_dev(rdev), ab8500_ext_set_voltage()
256 static int ab8500_ext_list_voltage(struct regulator_dev *rdev, ab8500_ext_list_voltage() argument
259 struct regulation_constraints *regu_constraints = rdev->constraints; ab8500_ext_list_voltage()
262 dev_err(rdev_get_dev(rdev), "regulator constraints null pointer\n"); ab8500_ext_list_voltage()
416 info->rdev = devm_regulator_register(&pdev->dev, &info->desc, ab8500_ext_regulator_probe()
418 if (IS_ERR(info->rdev)) { ab8500_ext_regulator_probe()
419 err = PTR_ERR(info->rdev); ab8500_ext_regulator_probe()
425 dev_dbg(rdev_get_dev(info->rdev), ab8500_ext_regulator_probe()
H A Drk808-regulator.c90 static int rk808_buck1_2_get_voltage_sel_regmap(struct regulator_dev *rdev) rk808_buck1_2_get_voltage_sel_regmap() argument
92 struct rk808_regulator_data *pdata = rdev_get_drvdata(rdev); rk808_buck1_2_get_voltage_sel_regmap()
93 int id = rdev->desc->id - RK808_ID_DCDC1; rk808_buck1_2_get_voltage_sel_regmap()
99 return regulator_get_voltage_sel_regmap(rdev); rk808_buck1_2_get_voltage_sel_regmap()
101 ret = regmap_read(rdev->regmap, rk808_buck1_2_get_voltage_sel_regmap()
102 rdev->desc->vsel_reg + RK808_DVS_REG_OFFSET, rk808_buck1_2_get_voltage_sel_regmap()
107 val &= rdev->desc->vsel_mask; rk808_buck1_2_get_voltage_sel_regmap()
108 val >>= ffs(rdev->desc->vsel_mask) - 1; rk808_buck1_2_get_voltage_sel_regmap()
113 static int rk808_buck1_2_i2c_set_voltage_sel(struct regulator_dev *rdev, rk808_buck1_2_i2c_set_voltage_sel() argument
117 unsigned int old_sel, tmp, val, mask = rdev->desc->vsel_mask; rk808_buck1_2_i2c_set_voltage_sel()
119 ret = regmap_read(rdev->regmap, rdev->desc->vsel_reg, &val); rk808_buck1_2_i2c_set_voltage_sel()
144 ret = regmap_write(rdev->regmap, rdev->desc->vsel_reg, val); rk808_buck1_2_i2c_set_voltage_sel()
150 ret = regmap_write(rdev->regmap, rdev->desc->vsel_reg, val); rk808_buck1_2_i2c_set_voltage_sel()
162 static int rk808_buck1_2_set_voltage_sel(struct regulator_dev *rdev, rk808_buck1_2_set_voltage_sel() argument
165 struct rk808_regulator_data *pdata = rdev_get_drvdata(rdev); rk808_buck1_2_set_voltage_sel()
166 int id = rdev->desc->id - RK808_ID_DCDC1; rk808_buck1_2_set_voltage_sel()
168 unsigned int reg = rdev->desc->vsel_reg; rk808_buck1_2_set_voltage_sel()
173 return rk808_buck1_2_i2c_set_voltage_sel(rdev, sel); rk808_buck1_2_set_voltage_sel()
178 ret = regmap_read(rdev->regmap, rdev->desc->vsel_reg, &old_sel); rk808_buck1_2_set_voltage_sel()
180 ret = regmap_read(rdev->regmap, rk808_buck1_2_set_voltage_sel()
188 sel <<= ffs(rdev->desc->vsel_mask) - 1; rk808_buck1_2_set_voltage_sel()
189 sel |= old_sel & ~rdev->desc->vsel_mask; rk808_buck1_2_set_voltage_sel()
191 ret = regmap_write(rdev->regmap, reg, sel); rk808_buck1_2_set_voltage_sel()
200 static int rk808_buck1_2_set_voltage_time_sel(struct regulator_dev *rdev, rk808_buck1_2_set_voltage_time_sel() argument
204 struct rk808_regulator_data *pdata = rdev_get_drvdata(rdev); rk808_buck1_2_set_voltage_time_sel()
205 int id = rdev->desc->id - RK808_ID_DCDC1; rk808_buck1_2_set_voltage_time_sel()
212 return regulator_set_voltage_time_sel(rdev, old_selector, new_selector); rk808_buck1_2_set_voltage_time_sel()
215 static int rk808_set_ramp_delay(struct regulator_dev *rdev, int ramp_delay) rk808_set_ramp_delay() argument
218 unsigned int reg = rk808_buck_config_regs[rdev->desc->id - rk808_set_ramp_delay()
235 rdev->desc->name, ramp_delay); rk808_set_ramp_delay()
238 return regmap_update_bits(rdev->regmap, reg, rk808_set_ramp_delay()
242 static int rk808_set_suspend_voltage(struct regulator_dev *rdev, int uv) rk808_set_suspend_voltage() argument
245 int sel = regulator_map_voltage_linear_range(rdev, uv, uv); rk808_set_suspend_voltage()
250 reg = rdev->desc->vsel_reg + RK808_SLP_REG_OFFSET; rk808_set_suspend_voltage()
252 return regmap_update_bits(rdev->regmap, reg, rk808_set_suspend_voltage()
253 rdev->desc->vsel_mask, rk808_set_suspend_voltage()
257 static int rk808_set_suspend_enable(struct regulator_dev *rdev) rk808_set_suspend_enable() argument
261 reg = rdev->desc->enable_reg + RK808_SLP_SET_OFF_REG_OFFSET; rk808_set_suspend_enable()
263 return regmap_update_bits(rdev->regmap, reg, rk808_set_suspend_enable()
264 rdev->desc->enable_mask, rk808_set_suspend_enable()
268 static int rk808_set_suspend_disable(struct regulator_dev *rdev) rk808_set_suspend_disable() argument
272 reg = rdev->desc->enable_reg + RK808_SLP_SET_OFF_REG_OFFSET; rk808_set_suspend_disable()
274 return regmap_update_bits(rdev->regmap, reg, rk808_set_suspend_disable()
275 rdev->desc->enable_mask, rk808_set_suspend_disable()
276 rdev->desc->enable_mask); rk808_set_suspend_disable()
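The rk808 suspend callbacks above all address a shadow copy of a control register at a fixed offset (vsel_reg + RK808_SLP_REG_OFFSET, enable_reg + RK808_SLP_SET_OFF_REG_OFFSET) and reuse the runtime mask. A sketch of the pattern; MY_SLP_REG_OFFSET is illustrative, not the real constant:

#include <linux/errno.h>
#include <linux/regmap.h>
#include <linux/regulator/driver.h>

#define MY_SLP_REG_OFFSET       0x01    /* hypothetical sleep-register offset */

static int my_set_suspend_voltage(struct regulator_dev *rdev, int uv)
{
        int sel = regulator_map_voltage_linear_range(rdev, uv, uv);

        if (sel < 0)
                return -EINVAL;

        return regmap_update_bits(rdev->regmap,
                                  rdev->desc->vsel_reg + MY_SLP_REG_OFFSET,
                                  rdev->desc->vsel_mask, sel);
}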
H A Dpcap-regulator.c149 static int pcap_regulator_set_voltage_sel(struct regulator_dev *rdev, pcap_regulator_set_voltage_sel() argument
152 struct pcap_regulator *vreg = &vreg_table[rdev_get_id(rdev)]; pcap_regulator_set_voltage_sel()
153 void *pcap = rdev_get_drvdata(rdev); pcap_regulator_set_voltage_sel()
156 if (rdev->desc->n_voltages == 1) pcap_regulator_set_voltage_sel()
160 (rdev->desc->n_voltages - 1) << vreg->index, pcap_regulator_set_voltage_sel()
164 static int pcap_regulator_get_voltage_sel(struct regulator_dev *rdev) pcap_regulator_get_voltage_sel() argument
166 struct pcap_regulator *vreg = &vreg_table[rdev_get_id(rdev)]; pcap_regulator_get_voltage_sel()
167 void *pcap = rdev_get_drvdata(rdev); pcap_regulator_get_voltage_sel()
170 if (rdev->desc->n_voltages == 1) pcap_regulator_get_voltage_sel()
174 tmp = ((tmp >> vreg->index) & (rdev->desc->n_voltages - 1)); pcap_regulator_get_voltage_sel()
178 static int pcap_regulator_enable(struct regulator_dev *rdev) pcap_regulator_enable() argument
180 struct pcap_regulator *vreg = &vreg_table[rdev_get_id(rdev)]; pcap_regulator_enable()
181 void *pcap = rdev_get_drvdata(rdev); pcap_regulator_enable()
189 static int pcap_regulator_disable(struct regulator_dev *rdev) pcap_regulator_disable() argument
191 struct pcap_regulator *vreg = &vreg_table[rdev_get_id(rdev)]; pcap_regulator_disable()
192 void *pcap = rdev_get_drvdata(rdev); pcap_regulator_disable()
200 static int pcap_regulator_is_enabled(struct regulator_dev *rdev) pcap_regulator_is_enabled() argument
202 struct pcap_regulator *vreg = &vreg_table[rdev_get_id(rdev)]; pcap_regulator_is_enabled()
203 void *pcap = rdev_get_drvdata(rdev); pcap_regulator_is_enabled()
241 struct regulator_dev *rdev; pcap_regulator_probe() local
249 rdev = devm_regulator_register(&pdev->dev, &pcap_regulators[pdev->id], pcap_regulator_probe()
251 if (IS_ERR(rdev)) pcap_regulator_probe()
252 return PTR_ERR(rdev); pcap_regulator_probe()
254 platform_set_drvdata(pdev, rdev); pcap_regulator_probe()
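pcap's probe shows the registration idiom repeated by most drivers in these results: build a regulator_config, call devm_regulator_register(), and propagate PTR_ERR() on failure; the devm_ prefix means no explicit unregister is needed on remove. A self-contained probe sketch with a hypothetical my_desc:

#include <linux/err.h>
#include <linux/platform_device.h>
#include <linux/regulator/driver.h>

static const struct regulator_desc my_desc; /* hypothetical, filled elsewhere */

static int my_regulator_probe(struct platform_device *pdev)
{
        struct regulator_config config = { };
        struct regulator_dev *rdev;

        config.dev = &pdev->dev;
        config.driver_data = dev_get_platdata(&pdev->dev);

        rdev = devm_regulator_register(&pdev->dev, &my_desc, &config);
        if (IS_ERR(rdev))
                return PTR_ERR(rdev);   /* managed: nothing to unwind */

        platform_set_drvdata(pdev, rdev);
        return 0;
}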
H A Daat2870-regulator.c45 static int aat2870_ldo_set_voltage_sel(struct regulator_dev *rdev, aat2870_ldo_set_voltage_sel() argument
48 struct aat2870_regulator *ri = rdev_get_drvdata(rdev); aat2870_ldo_set_voltage_sel()
55 static int aat2870_ldo_get_voltage_sel(struct regulator_dev *rdev) aat2870_ldo_get_voltage_sel() argument
57 struct aat2870_regulator *ri = rdev_get_drvdata(rdev); aat2870_ldo_get_voltage_sel()
69 static int aat2870_ldo_enable(struct regulator_dev *rdev) aat2870_ldo_enable() argument
71 struct aat2870_regulator *ri = rdev_get_drvdata(rdev); aat2870_ldo_enable()
78 static int aat2870_ldo_disable(struct regulator_dev *rdev) aat2870_ldo_disable() argument
80 struct aat2870_regulator *ri = rdev_get_drvdata(rdev); aat2870_ldo_disable()
86 static int aat2870_ldo_is_enabled(struct regulator_dev *rdev) aat2870_ldo_is_enabled() argument
88 struct aat2870_regulator *ri = rdev_get_drvdata(rdev); aat2870_ldo_is_enabled()
167 struct regulator_dev *rdev; aat2870_regulator_probe() local
180 rdev = devm_regulator_register(&pdev->dev, &ri->desc, &config); aat2870_regulator_probe()
181 if (IS_ERR(rdev)) { aat2870_regulator_probe()
184 return PTR_ERR(rdev); aat2870_regulator_probe()
186 platform_set_drvdata(pdev, rdev); aat2870_regulator_probe()
H A Dmax8925-regulator.c45 static int max8925_set_voltage_sel(struct regulator_dev *rdev, max8925_set_voltage_sel() argument
48 struct max8925_regulator_info *info = rdev_get_drvdata(rdev); max8925_set_voltage_sel()
49 unsigned char mask = rdev->desc->n_voltages - 1; max8925_set_voltage_sel()
54 static int max8925_get_voltage_sel(struct regulator_dev *rdev) max8925_get_voltage_sel() argument
56 struct max8925_regulator_info *info = rdev_get_drvdata(rdev); max8925_get_voltage_sel()
63 mask = rdev->desc->n_voltages - 1; max8925_get_voltage_sel()
69 static int max8925_enable(struct regulator_dev *rdev) max8925_enable() argument
71 struct max8925_regulator_info *info = rdev_get_drvdata(rdev); max8925_enable()
80 static int max8925_disable(struct regulator_dev *rdev) max8925_disable() argument
82 struct max8925_regulator_info *info = rdev_get_drvdata(rdev); max8925_disable()
90 static int max8925_is_enabled(struct regulator_dev *rdev) max8925_is_enabled() argument
92 struct max8925_regulator_info *info = rdev_get_drvdata(rdev); max8925_is_enabled()
105 static int max8925_set_dvm_voltage(struct regulator_dev *rdev, int uV) max8925_set_dvm_voltage() argument
107 struct max8925_regulator_info *info = rdev_get_drvdata(rdev); max8925_set_dvm_voltage()
120 static int max8925_set_dvm_enable(struct regulator_dev *rdev) max8925_set_dvm_enable() argument
122 struct max8925_regulator_info *info = rdev_get_drvdata(rdev); max8925_set_dvm_enable()
128 static int max8925_set_dvm_disable(struct regulator_dev *rdev) max8925_set_dvm_disable() argument
130 struct max8925_regulator_info *info = rdev_get_drvdata(rdev); max8925_set_dvm_disable()
283 struct regulator_dev *rdev; max8925_regulator_probe() local
313 rdev = devm_regulator_register(&pdev->dev, &ri->desc, &config); max8925_regulator_probe()
314 if (IS_ERR(rdev)) { max8925_regulator_probe()
317 return PTR_ERR(rdev); max8925_regulator_probe()
320 platform_set_drvdata(pdev, rdev); max8925_regulator_probe()
H A Dmax8660.c111 static int max8660_dcdc_is_enabled(struct regulator_dev *rdev) max8660_dcdc_is_enabled() argument
113 struct max8660 *max8660 = rdev_get_drvdata(rdev); max8660_dcdc_is_enabled()
115 u8 mask = (rdev_get_id(rdev) == MAX8660_V3) ? 1 : 4; max8660_dcdc_is_enabled()
120 static int max8660_dcdc_enable(struct regulator_dev *rdev) max8660_dcdc_enable() argument
122 struct max8660 *max8660 = rdev_get_drvdata(rdev); max8660_dcdc_enable()
123 u8 bit = (rdev_get_id(rdev) == MAX8660_V3) ? 1 : 4; max8660_dcdc_enable()
128 static int max8660_dcdc_disable(struct regulator_dev *rdev) max8660_dcdc_disable() argument
130 struct max8660 *max8660 = rdev_get_drvdata(rdev); max8660_dcdc_disable()
131 u8 mask = (rdev_get_id(rdev) == MAX8660_V3) ? ~1 : ~4; max8660_dcdc_disable()
136 static int max8660_dcdc_get_voltage_sel(struct regulator_dev *rdev) max8660_dcdc_get_voltage_sel() argument
138 struct max8660 *max8660 = rdev_get_drvdata(rdev); max8660_dcdc_get_voltage_sel()
139 u8 reg = (rdev_get_id(rdev) == MAX8660_V3) ? MAX8660_ADTV2 : MAX8660_SDTV2; max8660_dcdc_get_voltage_sel()
145 static int max8660_dcdc_set_voltage_sel(struct regulator_dev *rdev, max8660_dcdc_set_voltage_sel() argument
148 struct max8660 *max8660 = rdev_get_drvdata(rdev); max8660_dcdc_set_voltage_sel()
152 reg = (rdev_get_id(rdev) == MAX8660_V3) ? MAX8660_ADTV2 : MAX8660_SDTV2; max8660_dcdc_set_voltage_sel()
158 bits = (rdev_get_id(rdev) == MAX8660_V3) ? 0x03 : 0x30; max8660_dcdc_set_voltage_sel()
175 static int max8660_ldo5_get_voltage_sel(struct regulator_dev *rdev) max8660_ldo5_get_voltage_sel() argument
177 struct max8660 *max8660 = rdev_get_drvdata(rdev); max8660_ldo5_get_voltage_sel()
183 static int max8660_ldo5_set_voltage_sel(struct regulator_dev *rdev, max8660_ldo5_set_voltage_sel() argument
186 struct max8660 *max8660 = rdev_get_drvdata(rdev); max8660_ldo5_set_voltage_sel()
209 static int max8660_ldo67_is_enabled(struct regulator_dev *rdev) max8660_ldo67_is_enabled() argument
211 struct max8660 *max8660 = rdev_get_drvdata(rdev); max8660_ldo67_is_enabled()
213 u8 mask = (rdev_get_id(rdev) == MAX8660_V6) ? 2 : 4; max8660_ldo67_is_enabled()
218 static int max8660_ldo67_enable(struct regulator_dev *rdev) max8660_ldo67_enable() argument
220 struct max8660 *max8660 = rdev_get_drvdata(rdev); max8660_ldo67_enable()
221 u8 bit = (rdev_get_id(rdev) == MAX8660_V6) ? 2 : 4; max8660_ldo67_enable()
226 static int max8660_ldo67_disable(struct regulator_dev *rdev) max8660_ldo67_disable() argument
228 struct max8660 *max8660 = rdev_get_drvdata(rdev); max8660_ldo67_disable()
229 u8 mask = (rdev_get_id(rdev) == MAX8660_V6) ? ~2 : ~4; max8660_ldo67_disable()
234 static int max8660_ldo67_get_voltage_sel(struct regulator_dev *rdev) max8660_ldo67_get_voltage_sel() argument
236 struct max8660 *max8660 = rdev_get_drvdata(rdev); max8660_ldo67_get_voltage_sel()
237 u8 shift = (rdev_get_id(rdev) == MAX8660_V6) ? 0 : 4; max8660_ldo67_get_voltage_sel()
243 static int max8660_ldo67_set_voltage_sel(struct regulator_dev *rdev, max8660_ldo67_set_voltage_sel() argument
246 struct max8660 *max8660 = rdev_get_drvdata(rdev); max8660_ldo67_set_voltage_sel()
248 if (rdev_get_id(rdev) == MAX8660_V6) max8660_ldo67_set_voltage_sel()
487 struct regulator_dev *rdev; max8660_probe() local
496 rdev = devm_regulator_register(&client->dev, max8660_probe()
498 if (IS_ERR(rdev)) { max8660_probe()
499 ret = PTR_ERR(rdev); max8660_probe()
502 return PTR_ERR(rdev); max8660_probe()
H A Dtps6105x-regulator.c30 static int tps6105x_regulator_enable(struct regulator_dev *rdev) tps6105x_regulator_enable() argument
32 struct tps6105x *tps6105x = rdev_get_drvdata(rdev); tps6105x_regulator_enable()
45 static int tps6105x_regulator_disable(struct regulator_dev *rdev) tps6105x_regulator_disable() argument
47 struct tps6105x *tps6105x = rdev_get_drvdata(rdev); tps6105x_regulator_disable()
60 static int tps6105x_regulator_is_enabled(struct regulator_dev *rdev) tps6105x_regulator_is_enabled() argument
62 struct tps6105x *tps6105x = rdev_get_drvdata(rdev); tps6105x_regulator_is_enabled()
78 static int tps6105x_regulator_get_voltage_sel(struct regulator_dev *rdev) tps6105x_regulator_get_voltage_sel() argument
80 struct tps6105x *tps6105x = rdev_get_drvdata(rdev); tps6105x_regulator_get_voltage_sel()
93 static int tps6105x_regulator_set_voltage_sel(struct regulator_dev *rdev, tps6105x_regulator_set_voltage_sel() argument
96 struct tps6105x *tps6105x = rdev_get_drvdata(rdev); tps6105x_regulator_set_voltage_sel()
H A Dda903x.c89 static inline struct device *to_da903x_dev(struct regulator_dev *rdev) to_da903x_dev() argument
91 return rdev_get_dev(rdev)->parent->parent; to_da903x_dev()
104 static int da903x_set_voltage_sel(struct regulator_dev *rdev, unsigned selector) da903x_set_voltage_sel() argument
106 struct da903x_regulator_info *info = rdev_get_drvdata(rdev); da903x_set_voltage_sel()
107 struct device *da9034_dev = to_da903x_dev(rdev); da903x_set_voltage_sel()
110 if (rdev->desc->n_voltages == 1) da903x_set_voltage_sel()
119 static int da903x_get_voltage_sel(struct regulator_dev *rdev) da903x_get_voltage_sel() argument
121 struct da903x_regulator_info *info = rdev_get_drvdata(rdev); da903x_get_voltage_sel()
122 struct device *da9034_dev = to_da903x_dev(rdev); da903x_get_voltage_sel()
126 if (rdev->desc->n_voltages == 1) da903x_get_voltage_sel()
139 static int da903x_enable(struct regulator_dev *rdev) da903x_enable() argument
141 struct da903x_regulator_info *info = rdev_get_drvdata(rdev); da903x_enable()
142 struct device *da9034_dev = to_da903x_dev(rdev); da903x_enable()
148 static int da903x_disable(struct regulator_dev *rdev) da903x_disable() argument
150 struct da903x_regulator_info *info = rdev_get_drvdata(rdev); da903x_disable()
151 struct device *da9034_dev = to_da903x_dev(rdev); da903x_disable()
157 static int da903x_is_enabled(struct regulator_dev *rdev) da903x_is_enabled() argument
159 struct da903x_regulator_info *info = rdev_get_drvdata(rdev); da903x_is_enabled()
160 struct device *da9034_dev = to_da903x_dev(rdev); da903x_is_enabled()
172 static int da9030_set_ldo1_15_voltage_sel(struct regulator_dev *rdev, da9030_set_ldo1_15_voltage_sel() argument
175 struct da903x_regulator_info *info = rdev_get_drvdata(rdev); da9030_set_ldo1_15_voltage_sel()
176 struct device *da903x_dev = to_da903x_dev(rdev); da9030_set_ldo1_15_voltage_sel()
193 static int da9030_map_ldo14_voltage(struct regulator_dev *rdev, da9030_map_ldo14_voltage() argument
196 struct da903x_regulator_info *info = rdev_get_drvdata(rdev); da9030_map_ldo14_voltage()
215 static int da9030_list_ldo14_voltage(struct regulator_dev *rdev, da9030_list_ldo14_voltage() argument
218 struct da903x_regulator_info *info = rdev_get_drvdata(rdev); da9030_list_ldo14_voltage()
222 volt = rdev->desc->min_uV + da9030_list_ldo14_voltage()
223 rdev->desc->uV_step * (3 - (selector & ~0x4)); da9030_list_ldo14_voltage()
225 volt = (info->max_uV + rdev->desc->min_uV) / 2 + da9030_list_ldo14_voltage()
226 rdev->desc->uV_step * (selector & ~0x4); da9030_list_ldo14_voltage()
235 static int da9034_set_dvc_voltage_sel(struct regulator_dev *rdev, da9034_set_dvc_voltage_sel() argument
238 struct da903x_regulator_info *info = rdev_get_drvdata(rdev); da9034_set_dvc_voltage_sel()
239 struct device *da9034_dev = to_da903x_dev(rdev); da9034_set_dvc_voltage_sel()
437 struct regulator_dev *rdev; da903x_regulator_probe() local
464 rdev = devm_regulator_register(&pdev->dev, &ri->desc, &config); da903x_regulator_probe()
465 if (IS_ERR(rdev)) { da903x_regulator_probe()
468 return PTR_ERR(rdev); da903x_regulator_probe()
471 platform_set_drvdata(pdev, rdev); da903x_regulator_probe()
H A Ds2mpa01.c56 static int s2mpa01_regulator_set_voltage_time_sel(struct regulator_dev *rdev, s2mpa01_regulator_set_voltage_time_sel() argument
60 struct s2mpa01_info *s2mpa01 = rdev_get_drvdata(rdev); s2mpa01_regulator_set_voltage_time_sel()
64 switch (rdev_get_id(rdev)) { s2mpa01_regulator_set_voltage_time_sel()
90 ramp_delay = rdev->desc->ramp_delay; s2mpa01_regulator_set_voltage_time_sel()
92 old_volt = rdev->desc->min_uV + (rdev->desc->uV_step * old_selector); s2mpa01_regulator_set_voltage_time_sel()
93 new_volt = rdev->desc->min_uV + (rdev->desc->uV_step * new_selector); s2mpa01_regulator_set_voltage_time_sel()
98 static int s2mpa01_set_ramp_delay(struct regulator_dev *rdev, int ramp_delay) s2mpa01_set_ramp_delay() argument
100 struct s2mpa01_info *s2mpa01 = rdev_get_drvdata(rdev); s2mpa01_set_ramp_delay()
105 switch (rdev_get_id(rdev)) { s2mpa01_set_ramp_delay()
195 if (rdev_get_id(rdev) >= S2MPA01_BUCK1 && s2mpa01_set_ramp_delay()
196 rdev_get_id(rdev) <= S2MPA01_BUCK4) { s2mpa01_set_ramp_delay()
197 ret = regmap_update_bits(rdev->regmap, S2MPA01_REG_RAMP1, s2mpa01_set_ramp_delay()
200 dev_err(&rdev->dev, "failed to enable ramp rate\n"); s2mpa01_set_ramp_delay()
207 return regmap_update_bits(rdev->regmap, ramp_reg, 0x3 << ramp_shift, s2mpa01_set_ramp_delay()
211 return regmap_update_bits(rdev->regmap, S2MPA01_REG_RAMP1, s2mpa01_set_ramp_delay()
378 struct regulator_dev *rdev; s2mpa01_pmic_probe() local
387 rdev = devm_regulator_register(&pdev->dev, s2mpa01_pmic_probe()
389 if (IS_ERR(rdev)) { s2mpa01_pmic_probe()
392 return PTR_ERR(rdev); s2mpa01_pmic_probe()
H A Dtps65090-regulator.c47 * @rdev: The struct regulator_dev for the regulator.
55 struct regulator_dev *rdev; member in struct:tps65090_regulator
70 * @rdev: Regulator device
75 struct regulator_dev *rdev) tps65090_reg_set_overcurrent_wait()
79 ret = regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, tps65090_reg_set_overcurrent_wait()
83 dev_err(&rdev->dev, "Error updating overcurrent wait %#x\n", tps65090_reg_set_overcurrent_wait()
84 rdev->desc->enable_reg); tps65090_reg_set_overcurrent_wait()
93 * @rdev: Regulator device
98 static int tps65090_try_enable_fet(struct regulator_dev *rdev) tps65090_try_enable_fet() argument
103 ret = regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, tps65090_try_enable_fet()
104 rdev->desc->enable_mask, tps65090_try_enable_fet()
105 rdev->desc->enable_mask); tps65090_try_enable_fet()
107 dev_err(&rdev->dev, "Error in updating reg %#x\n", tps65090_try_enable_fet()
108 rdev->desc->enable_reg); tps65090_try_enable_fet()
113 ret = regmap_read(rdev->regmap, rdev->desc->enable_reg, tps65090_try_enable_fet()
139 * @rdev: Regulator device
143 static int tps65090_fet_enable(struct regulator_dev *rdev) tps65090_fet_enable() argument
153 ret = tps65090_try_enable_fet(rdev); tps65090_fet_enable()
160 ret = regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, tps65090_fet_enable()
161 rdev->desc->enable_mask, 0); tps65090_fet_enable()
169 dev_warn(&rdev->dev, "reg %#x enable ok after %d tries\n", tps65090_fet_enable()
170 rdev->desc->enable_reg, tries); tps65090_fet_enable()
174 dev_warn(&rdev->dev, "reg %#x enable failed\n", rdev->desc->enable_reg); tps65090_fet_enable()
416 struct regulator_dev *rdev; tps65090_regulator_probe() local
484 rdev = devm_regulator_register(&pdev->dev, ri->desc, &config); tps65090_regulator_probe()
485 if (IS_ERR(rdev)) { tps65090_regulator_probe()
488 return PTR_ERR(rdev); tps65090_regulator_probe()
490 ri->rdev = rdev; tps65090_regulator_probe()
493 ret = tps65090_reg_set_overcurrent_wait(ri, rdev); tps65090_regulator_probe()
74 tps65090_reg_set_overcurrent_wait(struct tps65090_regulator *ri, struct regulator_dev *rdev) tps65090_reg_set_overcurrent_wait() argument
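tps65090_fet_enable() above is a bounded retry loop: set the enable bit, read back to confirm the FET really switched, warn with the try count on a late success, and warn again on final failure. A sketch of the shape under stated assumptions — the retry budget, the clear-and-retry step, and the use of regulator_is_enabled_regmap() and usleep_range() are guesses at the lines the search elides:

#include <linux/delay.h>
#include <linux/regmap.h>
#include <linux/regulator/driver.h>

#define MAX_FET_ENABLE_TRIES    5       /* hypothetical retry budget */

static int my_fet_enable(struct regulator_dev *rdev)
{
        int ret, tries;

        for (tries = 0; tries < MAX_FET_ENABLE_TRIES; tries++) {
                ret = regmap_update_bits(rdev->regmap, rdev->desc->enable_reg,
                                         rdev->desc->enable_mask,
                                         rdev->desc->enable_mask);
                if (ret)
                        return ret;

                ret = regulator_is_enabled_regmap(rdev);
                if (ret > 0) {
                        if (tries)
                                dev_warn(&rdev->dev,
                                         "reg %#x enable ok after %d tries\n",
                                         rdev->desc->enable_reg, tries);
                        return 0;
                }

                /* assumed: clear the bit and retry after a short delay */
                regmap_update_bits(rdev->regmap, rdev->desc->enable_reg,
                                   rdev->desc->enable_mask, 0);
                usleep_range(1000, 2000);
        }

        dev_warn(&rdev->dev, "reg %#x enable failed\n", rdev->desc->enable_reg);
        return -ENOTRECOVERABLE;
}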
H A Dda9052-regulator.c79 struct regulator_dev *rdev; member in struct:da9052_regulator
91 static int da9052_dcdc_get_current_limit(struct regulator_dev *rdev) da9052_dcdc_get_current_limit() argument
93 struct da9052_regulator *regulator = rdev_get_drvdata(rdev); da9052_dcdc_get_current_limit()
94 int offset = rdev_get_id(rdev); da9052_dcdc_get_current_limit()
118 static int da9052_dcdc_set_current_limit(struct regulator_dev *rdev, int min_uA, da9052_dcdc_set_current_limit() argument
121 struct da9052_regulator *regulator = rdev_get_drvdata(rdev); da9052_dcdc_set_current_limit()
122 int offset = rdev_get_id(rdev); da9052_dcdc_set_current_limit()
158 static int da9052_list_voltage(struct regulator_dev *rdev, da9052_list_voltage() argument
161 struct da9052_regulator *regulator = rdev_get_drvdata(rdev); da9052_list_voltage()
163 int id = rdev_get_id(rdev); da9052_list_voltage()
182 static int da9052_map_voltage(struct regulator_dev *rdev, da9052_map_voltage() argument
185 struct da9052_regulator *regulator = rdev_get_drvdata(rdev); da9052_map_voltage()
187 int id = rdev_get_id(rdev); da9052_map_voltage()
206 ret = da9052_list_voltage(rdev, sel); da9052_map_voltage()
213 static int da9052_regulator_set_voltage_sel(struct regulator_dev *rdev, da9052_regulator_set_voltage_sel() argument
216 struct da9052_regulator *regulator = rdev_get_drvdata(rdev); da9052_regulator_set_voltage_sel()
218 int id = rdev_get_id(rdev); da9052_regulator_set_voltage_sel()
221 ret = da9052_reg_update(regulator->da9052, rdev->desc->vsel_reg, da9052_regulator_set_voltage_sel()
222 rdev->desc->vsel_mask, selector); da9052_regulator_set_voltage_sel()
243 static int da9052_regulator_set_voltage_time_sel(struct regulator_dev *rdev, da9052_regulator_set_voltage_time_sel() argument
247 struct da9052_regulator *regulator = rdev_get_drvdata(rdev); da9052_regulator_set_voltage_time_sel()
249 int id = rdev_get_id(rdev); da9052_regulator_set_voltage_time_sel()
451 regulator->rdev = devm_regulator_register(&pdev->dev,
454 if (IS_ERR(regulator->rdev)) {
457 return PTR_ERR(regulator->rdev);
H A Dmax77686.c132 static int max77686_set_suspend_disable(struct regulator_dev *rdev) max77686_set_suspend_disable() argument
135 struct max77686_data *max77686 = rdev_get_drvdata(rdev); max77686_set_suspend_disable()
136 int ret, id = rdev_get_id(rdev); max77686_set_suspend_disable()
141 ret = regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, max77686_set_suspend_disable()
142 rdev->desc->enable_mask, val << shift); max77686_set_suspend_disable()
151 static int max77686_set_suspend_mode(struct regulator_dev *rdev, max77686_set_suspend_mode() argument
154 struct max77686_data *max77686 = rdev_get_drvdata(rdev); max77686_set_suspend_mode()
156 int ret, id = rdev_get_id(rdev); max77686_set_suspend_mode()
171 rdev->desc->name, mode); max77686_set_suspend_mode()
175 ret = regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, max77686_set_suspend_mode()
176 rdev->desc->enable_mask, max77686_set_suspend_mode()
186 static int max77686_ldo_set_suspend_mode(struct regulator_dev *rdev, max77686_ldo_set_suspend_mode() argument
190 struct max77686_data *max77686 = rdev_get_drvdata(rdev); max77686_ldo_set_suspend_mode()
191 int ret, id = rdev_get_id(rdev); max77686_ldo_set_suspend_mode()
205 rdev->desc->name, mode); max77686_ldo_set_suspend_mode()
209 ret = regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, max77686_ldo_set_suspend_mode()
210 rdev->desc->enable_mask, max77686_ldo_set_suspend_mode()
219 static int max77686_enable(struct regulator_dev *rdev) max77686_enable() argument
221 struct max77686_data *max77686 = rdev_get_drvdata(rdev); max77686_enable()
223 int id = rdev_get_id(rdev); max77686_enable()
230 return regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, max77686_enable()
231 rdev->desc->enable_mask, max77686_enable()
235 static int max77686_set_ramp_delay(struct regulator_dev *rdev, int ramp_delay) max77686_set_ramp_delay() argument
253 rdev->desc->name, ramp_delay); max77686_set_ramp_delay()
256 return regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, max77686_set_ramp_delay()
525 struct regulator_dev *rdev; max77686_pmic_probe() local
529 rdev = devm_regulator_register(&pdev->dev, max77686_pmic_probe()
531 if (IS_ERR(rdev)) { max77686_pmic_probe()
532 int ret = PTR_ERR(rdev); max77686_pmic_probe()
H A Dpwm-regulator.c44 static int pwm_regulator_get_voltage_sel(struct regulator_dev *rdev) pwm_regulator_get_voltage_sel() argument
46 struct pwm_regulator_data *drvdata = rdev_get_drvdata(rdev); pwm_regulator_get_voltage_sel()
51 static int pwm_regulator_set_voltage_sel(struct regulator_dev *rdev, pwm_regulator_set_voltage_sel() argument
54 struct pwm_regulator_data *drvdata = rdev_get_drvdata(rdev); pwm_regulator_set_voltage_sel()
66 dev_err(&rdev->dev, "Failed to configure PWM\n"); pwm_regulator_set_voltage_sel()
75 static int pwm_regulator_list_voltage(struct regulator_dev *rdev, pwm_regulator_list_voltage() argument
78 struct pwm_regulator_data *drvdata = rdev_get_drvdata(rdev); pwm_regulator_list_voltage()
80 if (selector >= rdev->desc->n_voltages) pwm_regulator_list_voltage()
112 static int pwm_voltage_to_duty_cycle_percentage(struct regulator_dev *rdev, int req_uV) pwm_voltage_to_duty_cycle_percentage() argument
114 int min_uV = rdev->constraints->min_uV; pwm_voltage_to_duty_cycle_percentage()
115 int max_uV = rdev->constraints->max_uV; pwm_voltage_to_duty_cycle_percentage()
121 static int pwm_regulator_get_voltage(struct regulator_dev *rdev) pwm_regulator_get_voltage() argument
123 struct pwm_regulator_data *drvdata = rdev_get_drvdata(rdev); pwm_regulator_get_voltage()
128 static int pwm_regulator_set_voltage(struct regulator_dev *rdev, pwm_regulator_set_voltage() argument
132 struct pwm_regulator_data *drvdata = rdev_get_drvdata(rdev); pwm_regulator_set_voltage()
133 unsigned int ramp_delay = rdev->constraints->ramp_delay; pwm_regulator_set_voltage()
138 duty_cycle = pwm_voltage_to_duty_cycle_percentage(rdev, min_uV); pwm_regulator_set_voltage()
142 dev_err(&rdev->dev, "Failed to configure PWM\n"); pwm_regulator_set_voltage()
148 dev_err(&rdev->dev, "Failed to enable PWM\n"); pwm_regulator_set_voltage()
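pwm_voltage_to_duty_cycle_percentage() above works off the constraint window rather than a voltage table. A plausible sketch of the arithmetic, assuming a direct (not inverted) linear mapping of the request's position in [min_uV, max_uV] onto 0..100 percent:

#include <linux/errno.h>

static int my_voltage_to_duty_pct(int req_uV, int min_uV, int max_uV)
{
        if (max_uV <= min_uV || req_uV < min_uV || req_uV > max_uV)
                return -EINVAL;

        /* fits in int: microvolt deltas stay well under INT_MAX / 100 */
        return ((req_uV - min_uV) * 100) / (max_uV - min_uV);
}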
H A Dtps80031-regulator.c88 struct regulator_dev *rdev; member in struct:tps80031_regulator
96 static inline struct device *to_tps80031_dev(struct regulator_dev *rdev) to_tps80031_dev() argument
98 return rdev_get_dev(rdev)->parent->parent; to_tps80031_dev()
101 static int tps80031_reg_is_enabled(struct regulator_dev *rdev) tps80031_reg_is_enabled() argument
103 struct tps80031_regulator *ri = rdev_get_drvdata(rdev); tps80031_reg_is_enabled()
104 struct device *parent = to_tps80031_dev(rdev); tps80031_reg_is_enabled()
114 dev_err(&rdev->dev, "Reg 0x%02x read failed, err = %d\n", tps80031_reg_is_enabled()
121 static int tps80031_reg_enable(struct regulator_dev *rdev) tps80031_reg_enable() argument
123 struct tps80031_regulator *ri = rdev_get_drvdata(rdev); tps80031_reg_enable()
124 struct device *parent = to_tps80031_dev(rdev); tps80031_reg_enable()
133 dev_err(&rdev->dev, "Reg 0x%02x update failed, err = %d\n", tps80031_reg_enable()
140 static int tps80031_reg_disable(struct regulator_dev *rdev) tps80031_reg_disable() argument
142 struct tps80031_regulator *ri = rdev_get_drvdata(rdev); tps80031_reg_disable()
143 struct device *parent = to_tps80031_dev(rdev); tps80031_reg_disable()
152 dev_err(&rdev->dev, "Reg 0x%02x update failed, err = %d\n", tps80031_reg_disable()
165 static int tps80031_dcdc_list_voltage(struct regulator_dev *rdev, unsigned sel) tps80031_dcdc_list_voltage() argument
167 struct tps80031_regulator *ri = rdev_get_drvdata(rdev); tps80031_dcdc_list_voltage()
173 return regulator_list_voltage_linear(rdev, sel - 1); tps80031_dcdc_list_voltage()
178 static int tps80031_dcdc_set_voltage_sel(struct regulator_dev *rdev, tps80031_dcdc_set_voltage_sel() argument
181 struct tps80031_regulator *ri = rdev_get_drvdata(rdev); tps80031_dcdc_set_voltage_sel()
182 struct device *parent = to_tps80031_dev(rdev); tps80031_dcdc_set_voltage_sel()
212 static int tps80031_dcdc_get_voltage_sel(struct regulator_dev *rdev) tps80031_dcdc_get_voltage_sel() argument
214 struct tps80031_regulator *ri = rdev_get_drvdata(rdev); tps80031_dcdc_get_voltage_sel()
215 struct device *parent = to_tps80031_dev(rdev); tps80031_dcdc_get_voltage_sel()
241 static int tps80031_ldo_list_voltage(struct regulator_dev *rdev, tps80031_ldo_list_voltage() argument
244 struct tps80031_regulator *ri = rdev_get_drvdata(rdev); tps80031_ldo_list_voltage()
245 struct device *parent = to_tps80031_dev(rdev); tps80031_ldo_list_voltage()
262 return regulator_list_voltage_linear(rdev, sel); tps80031_ldo_list_voltage()
265 static int tps80031_ldo_map_voltage(struct regulator_dev *rdev, tps80031_ldo_map_voltage() argument
268 struct tps80031_regulator *ri = rdev_get_drvdata(rdev); tps80031_ldo_map_voltage()
269 struct device *parent = to_tps80031_dev(rdev); tps80031_ldo_map_voltage()
277 return regulator_map_voltage_iterate(rdev, min_uV, tps80031_ldo_map_voltage()
282 return regulator_map_voltage_linear(rdev, min_uV, max_uV); tps80031_ldo_map_voltage()
285 static int tps80031_vbus_is_enabled(struct regulator_dev *rdev) tps80031_vbus_is_enabled() argument
287 struct tps80031_regulator *ri = rdev_get_drvdata(rdev); tps80031_vbus_is_enabled()
288 struct device *parent = to_tps80031_dev(rdev); tps80031_vbus_is_enabled()
312 static int tps80031_vbus_enable(struct regulator_dev *rdev) tps80031_vbus_enable() argument
314 struct tps80031_regulator *ri = rdev_get_drvdata(rdev); tps80031_vbus_enable()
315 struct device *parent = to_tps80031_dev(rdev); tps80031_vbus_enable()
336 static int tps80031_vbus_disable(struct regulator_dev *rdev) tps80031_vbus_disable() argument
338 struct tps80031_regulator *ri = rdev_get_drvdata(rdev); tps80031_vbus_disable()
339 struct device *parent = to_tps80031_dev(rdev); tps80031_vbus_disable()
681 struct regulator_dev *rdev; tps80031_regulator_probe() local
731 rdev = devm_regulator_register(&pdev->dev, &ri->rinfo->desc, tps80031_regulator_probe()
733 if (IS_ERR(rdev)) { tps80031_regulator_probe()
737 return PTR_ERR(rdev); tps80031_regulator_probe()
739 ri->rdev = rdev; tps80031_regulator_probe()
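tps80031_dcdc_list_voltage() encodes a "selector 0 means off" convention: the linear range starts at selector 1, hence the sel - 1 when delegating to the core helper. A sketch of just that convention:

#include <linux/regulator/driver.h>

static int my_dcdc_list_voltage(struct regulator_dev *rdev, unsigned sel)
{
        if (sel == 0)
                return 0;       /* selector 0: converter off */

        return regulator_list_voltage_linear(rdev, sel - 1);
}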
H A Ddb8500-prcmu.c25 static int db8500_regulator_enable(struct regulator_dev *rdev) db8500_regulator_enable() argument
27 struct dbx500_regulator_info *info = rdev_get_drvdata(rdev); db8500_regulator_enable()
32 dev_vdbg(rdev_get_dev(rdev), "regulator-%s-enable\n", db8500_regulator_enable()
44 static int db8500_regulator_disable(struct regulator_dev *rdev) db8500_regulator_disable() argument
46 struct dbx500_regulator_info *info = rdev_get_drvdata(rdev); db8500_regulator_disable()
52 dev_vdbg(rdev_get_dev(rdev), "regulator-%s-disable\n", db8500_regulator_disable()
64 static int db8500_regulator_is_enabled(struct regulator_dev *rdev) db8500_regulator_is_enabled() argument
66 struct dbx500_regulator_info *info = rdev_get_drvdata(rdev); db8500_regulator_is_enabled()
71 dev_vdbg(rdev_get_dev(rdev), "regulator-%s-is_enabled (is_enabled):" db8500_regulator_is_enabled()
141 static int db8500_regulator_switch_enable(struct regulator_dev *rdev) db8500_regulator_switch_enable() argument
143 struct dbx500_regulator_info *info = rdev_get_drvdata(rdev); db8500_regulator_switch_enable()
149 dev_vdbg(rdev_get_dev(rdev), "regulator-switch-%s-enable\n", db8500_regulator_switch_enable()
154 dev_err(rdev_get_dev(rdev), db8500_regulator_switch_enable()
165 static int db8500_regulator_switch_disable(struct regulator_dev *rdev) db8500_regulator_switch_disable() argument
167 struct dbx500_regulator_info *info = rdev_get_drvdata(rdev); db8500_regulator_switch_disable()
173 dev_vdbg(rdev_get_dev(rdev), "regulator-switch-%s-disable\n", db8500_regulator_switch_disable()
178 dev_err(rdev_get_dev(rdev), db8500_regulator_switch_disable()
189 static int db8500_regulator_switch_is_enabled(struct regulator_dev *rdev) db8500_regulator_switch_is_enabled() argument
191 struct dbx500_regulator_info *info = rdev_get_drvdata(rdev); db8500_regulator_switch_is_enabled()
196 dev_vdbg(rdev_get_dev(rdev), db8500_regulator_switch_is_enabled()
436 info->rdev = devm_regulator_register(&pdev->dev, &info->desc, &config); db8500_regulator_register()
437 if (IS_ERR(info->rdev)) { db8500_regulator_register()
438 err = PTR_ERR(info->rdev); db8500_regulator_register()
444 dev_dbg(rdev_get_dev(info->rdev), db8500_regulator_register()
H A Dsky81452-regulator.c73 struct regulator_dev *rdev; sky81452_reg_probe() local
80 rdev = devm_regulator_register(dev, &sky81452_reg, &config); sky81452_reg_probe()
81 if (IS_ERR(rdev)) { sky81452_reg_probe()
82 dev_err(dev, "failed to register. err=%ld\n", PTR_ERR(rdev)); sky81452_reg_probe()
83 return PTR_ERR(rdev); sky81452_reg_probe()
86 platform_set_drvdata(pdev, rdev); sky81452_reg_probe()
H A Ds2mps11.c79 static int s2mps11_regulator_set_voltage_time_sel(struct regulator_dev *rdev, s2mps11_regulator_set_voltage_time_sel() argument
83 struct s2mps11_info *s2mps11 = rdev_get_drvdata(rdev); s2mps11_regulator_set_voltage_time_sel()
87 switch (rdev_get_id(rdev)) { s2mps11_regulator_set_voltage_time_sel()
112 ramp_delay = rdev->desc->ramp_delay; s2mps11_regulator_set_voltage_time_sel()
114 old_volt = rdev->desc->min_uV + (rdev->desc->uV_step * old_selector); s2mps11_regulator_set_voltage_time_sel()
115 new_volt = rdev->desc->min_uV + (rdev->desc->uV_step * new_selector); s2mps11_regulator_set_voltage_time_sel()
120 static int s2mps11_set_ramp_delay(struct regulator_dev *rdev, int ramp_delay) s2mps11_set_ramp_delay() argument
122 struct s2mps11_info *s2mps11 = rdev_get_drvdata(rdev); s2mps11_set_ramp_delay()
127 switch (rdev_get_id(rdev)) { s2mps11_set_ramp_delay()
217 if ((rdev_get_id(rdev) >= S2MPS11_BUCK2 && s2mps11_set_ramp_delay()
218 rdev_get_id(rdev) <= S2MPS11_BUCK4) || s2mps11_set_ramp_delay()
219 rdev_get_id(rdev) == S2MPS11_BUCK6) { s2mps11_set_ramp_delay()
220 ret = regmap_update_bits(rdev->regmap, S2MPS11_REG_RAMP, s2mps11_set_ramp_delay()
223 dev_err(&rdev->dev, "failed to enable ramp rate\n"); s2mps11_set_ramp_delay()
230 return regmap_update_bits(rdev->regmap, ramp_reg, 0x3 << ramp_shift, s2mps11_set_ramp_delay()
234 return regmap_update_bits(rdev->regmap, S2MPS11_REG_RAMP, s2mps11_set_ramp_delay()
513 static int s2mps14_regulator_enable(struct regulator_dev *rdev) s2mps14_regulator_enable() argument
515 struct s2mps11_info *s2mps11 = rdev_get_drvdata(rdev); s2mps14_regulator_enable()
521 if (test_bit(rdev_get_id(rdev), s2mps11->suspend_state)) s2mps14_regulator_enable()
523 else if (gpio_is_valid(s2mps11->ext_control_gpio[rdev_get_id(rdev)])) s2mps14_regulator_enable()
526 val = rdev->desc->enable_mask; s2mps14_regulator_enable()
529 if (test_bit(rdev_get_id(rdev), s2mps11->suspend_state)) s2mps14_regulator_enable()
532 val = rdev->desc->enable_mask; s2mps14_regulator_enable()
538 return regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, s2mps14_regulator_enable()
539 rdev->desc->enable_mask, val); s2mps14_regulator_enable()
542 static int s2mps14_regulator_set_suspend_disable(struct regulator_dev *rdev) s2mps14_regulator_set_suspend_disable() argument
546 struct s2mps11_info *s2mps11 = rdev_get_drvdata(rdev); s2mps14_regulator_set_suspend_disable()
547 int rdev_id = rdev_get_id(rdev); s2mps14_regulator_set_suspend_disable()
579 ret = regmap_read(rdev->regmap, rdev->desc->enable_reg, &val); s2mps14_regulator_set_suspend_disable()
583 set_bit(rdev_get_id(rdev), s2mps11->suspend_state); s2mps14_regulator_set_suspend_disable()
591 if (!(val & rdev->desc->enable_mask)) s2mps14_regulator_set_suspend_disable()
594 return regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, s2mps14_regulator_set_suspend_disable()
595 rdev->desc->enable_mask, state); s2mps14_regulator_set_suspend_disable()
681 struct regulator_dev *rdev) s2mps14_pmic_enable_ext_control()
683 return regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, s2mps14_pmic_enable_ext_control()
684 rdev->desc->enable_mask, S2MPS14_ENABLE_EXT_CONTROL); s2mps14_pmic_enable_ext_control()
729 static int s2mpu02_set_ramp_delay(struct regulator_dev *rdev, int ramp_delay) s2mpu02_set_ramp_delay() argument
733 switch (rdev_get_id(rdev)) { s2mpu02_set_ramp_delay()
752 return regmap_update_bits(rdev->regmap, ramp_reg, s2mpu02_set_ramp_delay()
680 s2mps14_pmic_enable_ext_control(struct s2mps11_info *s2mps11, struct regulator_dev *rdev) s2mps14_pmic_enable_ext_control() argument
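The s2mpa01/s2mps11 set_voltage_time_sel() hits reconstruct the old and new voltages from the linear descriptor fields; the remaining step, dividing the delta by the ramp rate in uV/us, is cut off by the search but follows directly. A sketch with that final division assumed:

#include <linux/kernel.h>
#include <linux/regulator/driver.h>

static int my_set_voltage_time_sel(struct regulator_dev *rdev,
                                   unsigned int old_selector,
                                   unsigned int new_selector)
{
        int ramp_delay = rdev->desc->ramp_delay;        /* uV per us */
        int old_volt, new_volt;

        if (!ramp_delay)
                return 0;

        old_volt = rdev->desc->min_uV + (rdev->desc->uV_step * old_selector);
        new_volt = rdev->desc->min_uV + (rdev->desc->uV_step * new_selector);

        /* assumed completion: round the settle time up to a whole us */
        return DIV_ROUND_UP(abs(new_volt - old_volt), ramp_delay);
}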
H A Dwm8350-regulator.c111 static int wm8350_isink_set_current(struct regulator_dev *rdev, int min_uA, wm8350_isink_set_current() argument
114 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_isink_set_current()
115 int isink = rdev_get_id(rdev); wm8350_isink_set_current()
143 static int wm8350_isink_get_current(struct regulator_dev *rdev) wm8350_isink_get_current() argument
145 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_isink_get_current()
146 int isink = rdev_get_id(rdev); wm8350_isink_get_current()
166 static int wm8350_isink_enable(struct regulator_dev *rdev) wm8350_isink_enable() argument
168 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_isink_enable()
169 int isink = rdev_get_id(rdev); wm8350_isink_enable()
210 static int wm8350_isink_disable(struct regulator_dev *rdev) wm8350_isink_disable() argument
212 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_isink_disable()
213 int isink = rdev_get_id(rdev); wm8350_isink_disable()
250 static int wm8350_isink_is_enabled(struct regulator_dev *rdev) wm8350_isink_is_enabled() argument
252 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_isink_is_enabled()
253 int isink = rdev_get_id(rdev); wm8350_isink_is_enabled()
266 static int wm8350_isink_enable_time(struct regulator_dev *rdev) wm8350_isink_enable_time() argument
268 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_isink_enable_time()
269 int isink = rdev_get_id(rdev); wm8350_isink_enable_time()
335 static int wm8350_dcdc_set_suspend_voltage(struct regulator_dev *rdev, int uV) wm8350_dcdc_set_suspend_voltage() argument
337 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_dcdc_set_suspend_voltage()
338 int sel, volt_reg, dcdc = rdev_get_id(rdev); wm8350_dcdc_set_suspend_voltage()
362 sel = regulator_map_voltage_linear(rdev, uV, uV); wm8350_dcdc_set_suspend_voltage()
372 static int wm8350_dcdc_set_suspend_enable(struct regulator_dev *rdev) wm8350_dcdc_set_suspend_enable() argument
374 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_dcdc_set_suspend_enable()
375 int dcdc = rdev_get_id(rdev); wm8350_dcdc_set_suspend_enable()
412 static int wm8350_dcdc_set_suspend_disable(struct regulator_dev *rdev) wm8350_dcdc_set_suspend_disable() argument
414 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_dcdc_set_suspend_disable()
415 int dcdc = rdev_get_id(rdev); wm8350_dcdc_set_suspend_disable()
452 static int wm8350_dcdc25_set_suspend_enable(struct regulator_dev *rdev) wm8350_dcdc25_set_suspend_enable() argument
454 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_dcdc25_set_suspend_enable()
455 int dcdc = rdev_get_id(rdev); wm8350_dcdc25_set_suspend_enable()
477 static int wm8350_dcdc25_set_suspend_disable(struct regulator_dev *rdev) wm8350_dcdc25_set_suspend_disable() argument
479 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_dcdc25_set_suspend_disable()
480 int dcdc = rdev_get_id(rdev); wm8350_dcdc25_set_suspend_disable()
502 static int wm8350_dcdc_set_suspend_mode(struct regulator_dev *rdev, wm8350_dcdc_set_suspend_mode() argument
505 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_dcdc_set_suspend_mode()
506 int dcdc = rdev_get_id(rdev); wm8350_dcdc_set_suspend_mode()
550 static int wm8350_ldo_set_suspend_voltage(struct regulator_dev *rdev, int uV) wm8350_ldo_set_suspend_voltage() argument
552 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_ldo_set_suspend_voltage()
553 int sel, volt_reg, ldo = rdev_get_id(rdev); wm8350_ldo_set_suspend_voltage()
575 sel = regulator_map_voltage_linear_range(rdev, uV, uV); wm8350_ldo_set_suspend_voltage()
585 static int wm8350_ldo_set_suspend_enable(struct regulator_dev *rdev) wm8350_ldo_set_suspend_enable() argument
587 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_ldo_set_suspend_enable()
588 int volt_reg, ldo = rdev_get_id(rdev); wm8350_ldo_set_suspend_enable()
614 static int wm8350_ldo_set_suspend_disable(struct regulator_dev *rdev) wm8350_ldo_set_suspend_disable() argument
616 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_ldo_set_suspend_disable()
617 int volt_reg, ldo = rdev_get_id(rdev); wm8350_ldo_set_suspend_disable()
793 static int wm8350_dcdc_set_mode(struct regulator_dev *rdev, unsigned int mode) wm8350_dcdc_set_mode() argument
795 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_dcdc_set_mode()
796 int dcdc = rdev_get_id(rdev); wm8350_dcdc_set_mode()
836 static unsigned int wm8350_dcdc_get_mode(struct regulator_dev *rdev) wm8350_dcdc_get_mode() argument
838 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_dcdc_get_mode()
839 int dcdc = rdev_get_id(rdev); wm8350_dcdc_get_mode()
882 static unsigned int wm8350_ldo_get_mode(struct regulator_dev *rdev) wm8350_ldo_get_mode() argument
922 static unsigned int wm8350_dcdc_get_optimum_mode(struct regulator_dev *rdev, wm8350_dcdc_get_optimum_mode() argument
926 int dcdc = rdev_get_id(rdev), mode; wm8350_dcdc_get_optimum_mode()
1153 struct regulator_dev *rdev = (struct regulator_dev *)data; pmic_uv_handler() local
1155 mutex_lock(&rdev->mutex); pmic_uv_handler()
1157 regulator_notifier_call_chain(rdev, pmic_uv_handler()
1161 regulator_notifier_call_chain(rdev, pmic_uv_handler()
1164 mutex_unlock(&rdev->mutex); pmic_uv_handler()
1173 struct regulator_dev *rdev; wm8350_regulator_probe() local
1206 rdev = devm_regulator_register(&pdev->dev, &wm8350_reg[pdev->id], wm8350_regulator_probe()
1208 if (IS_ERR(rdev)) { wm8350_regulator_probe()
1211 return PTR_ERR(rdev); wm8350_regulator_probe()
1216 pmic_uv_handler, 0, "UV", rdev); wm8350_regulator_probe()
1228 struct regulator_dev *rdev = platform_get_drvdata(pdev); wm8350_regulator_remove() local
1229 struct wm8350 *wm8350 = rdev_get_drvdata(rdev); wm8350_regulator_remove()
1231 wm8350_free_irq(wm8350, wm8350_reg[pdev->id].irq, rdev); wm8350_regulator_remove()
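pmic_uv_handler() above shows how a PMIC IRQ fans out to regulator consumers: take rdev->mutex, then broadcast with regulator_notifier_call_chain(). A sketch; the specific event is not visible in the hits, so REGULATOR_EVENT_UNDER_VOLTAGE is assumed from the UV context:

#include <linux/interrupt.h>
#include <linux/regulator/driver.h>

static irqreturn_t my_uv_handler(int irq, void *data)
{
        struct regulator_dev *rdev = data;      /* passed at request_irq time */

        mutex_lock(&rdev->mutex);
        regulator_notifier_call_chain(rdev, REGULATOR_EVENT_UNDER_VOLTAGE,
                                      NULL);
        mutex_unlock(&rdev->mutex);

        return IRQ_HANDLED;
}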
H A Dmax8649.c62 static int max8649_enable_time(struct regulator_dev *rdev) max8649_enable_time() argument
64 struct max8649_regulator_info *info = rdev_get_drvdata(rdev); max8649_enable_time()
69 ret = regmap_read(info->regmap, rdev->desc->vsel_reg, &val); max8649_enable_time()
73 voltage = regulator_list_voltage_linear(rdev, (unsigned char)val); max8649_enable_time()
85 static int max8649_set_mode(struct regulator_dev *rdev, unsigned int mode) max8649_set_mode() argument
87 struct max8649_regulator_info *info = rdev_get_drvdata(rdev); max8649_set_mode()
91 regmap_update_bits(info->regmap, rdev->desc->vsel_reg, max8649_set_mode()
95 regmap_update_bits(info->regmap, rdev->desc->vsel_reg, max8649_set_mode()
104 static unsigned int max8649_get_mode(struct regulator_dev *rdev) max8649_get_mode() argument
106 struct max8649_regulator_info *info = rdev_get_drvdata(rdev); max8649_get_mode()
110 ret = regmap_read(info->regmap, rdev->desc->vsel_reg, &val); max8649_get_mode()
H A Drc5t583-regulator.c52 struct regulator_dev *rdev; member in struct:rc5t583_regulator
55 static int rc5t583_regulator_enable_time(struct regulator_dev *rdev) rc5t583_regulator_enable_time() argument
57 struct rc5t583_regulator *reg = rdev_get_drvdata(rdev); rc5t583_regulator_enable_time()
58 int vsel = regulator_get_voltage_sel_regmap(rdev); rc5t583_regulator_enable_time()
59 int curr_uV = regulator_list_voltage_linear(rdev, vsel); rc5t583_regulator_enable_time()
125 struct regulator_dev *rdev; rc5t583_regulator_probe() local
167 rdev = devm_regulator_register(&pdev->dev, &ri->desc, &config); rc5t583_regulator_probe()
168 if (IS_ERR(rdev)) { rc5t583_regulator_probe()
171 return PTR_ERR(rdev); rc5t583_regulator_probe()
173 reg->rdev = rdev; rc5t583_regulator_probe()
H A Dda9210-regulator.c36 struct regulator_dev *rdev; member in struct:da9210
45 static int da9210_set_current_limit(struct regulator_dev *rdev, int min_uA,
47 static int da9210_get_current_limit(struct regulator_dev *rdev);
86 static int da9210_set_current_limit(struct regulator_dev *rdev, int min_uA, da9210_set_current_limit() argument
89 struct da9210 *chip = rdev_get_drvdata(rdev); da9210_set_current_limit()
108 static int da9210_get_current_limit(struct regulator_dev *rdev) da9210_get_current_limit() argument
110 struct da9210 *chip = rdev_get_drvdata(rdev); da9210_get_current_limit()
136 regulator_notifier_call_chain(chip->rdev, da9210_irq_handler()
142 regulator_notifier_call_chain(chip->rdev, da9210_irq_handler()
148 regulator_notifier_call_chain(chip->rdev, da9210_irq_handler()
153 regulator_notifier_call_chain(chip->rdev, da9210_irq_handler()
183 struct regulator_dev *rdev = NULL; da9210_i2c_probe() local
215 rdev = devm_regulator_register(&i2c->dev, &da9210_reg, &config); da9210_i2c_probe()
216 if (IS_ERR(rdev)) { da9210_i2c_probe()
218 return PTR_ERR(rdev); da9210_i2c_probe()
221 chip->rdev = rdev; da9210_i2c_probe()
H A Dwm831x-dcdc.c64 static unsigned int wm831x_dcdc_get_mode(struct regulator_dev *rdev) wm831x_dcdc_get_mode() argument
67 struct wm831x_dcdc *dcdc = rdev_get_drvdata(rdev); wm831x_dcdc_get_mode()
119 static int wm831x_dcdc_set_mode(struct regulator_dev *rdev, unsigned int mode) wm831x_dcdc_set_mode() argument
121 struct wm831x_dcdc *dcdc = rdev_get_drvdata(rdev); wm831x_dcdc_set_mode()
128 static int wm831x_dcdc_set_suspend_mode(struct regulator_dev *rdev, wm831x_dcdc_set_suspend_mode() argument
131 struct wm831x_dcdc *dcdc = rdev_get_drvdata(rdev); wm831x_dcdc_set_suspend_mode()
138 static int wm831x_dcdc_get_status(struct regulator_dev *rdev) wm831x_dcdc_get_status() argument
140 struct wm831x_dcdc *dcdc = rdev_get_drvdata(rdev); wm831x_dcdc_get_status()
149 if (ret & (1 << rdev_get_id(rdev))) { wm831x_dcdc_get_status()
151 rdev_get_id(rdev) + 1); wm831x_dcdc_get_status()
156 if (rdev_get_id(rdev) < 2) { wm831x_dcdc_get_status()
157 if (ret & (WM831X_DC1_OV_STS << rdev_get_id(rdev))) { wm831x_dcdc_get_status()
159 rdev_get_id(rdev) + 1); wm831x_dcdc_get_status()
163 if (ret & (WM831X_DC1_HC_STS << rdev_get_id(rdev))) { wm831x_dcdc_get_status()
165 rdev_get_id(rdev) + 1); wm831x_dcdc_get_status()
174 if (!(ret & (1 << rdev_get_id(rdev)))) wm831x_dcdc_get_status()
208 static int wm831x_buckv_list_voltage(struct regulator_dev *rdev, wm831x_buckv_list_voltage() argument
218 static int wm831x_buckv_map_voltage(struct regulator_dev *rdev, wm831x_buckv_map_voltage() argument
230 if (wm831x_buckv_list_voltage(rdev, vsel) > max_uV) wm831x_buckv_map_voltage()
236 static int wm831x_buckv_set_dvs(struct regulator_dev *rdev, int state) wm831x_buckv_set_dvs() argument
238 struct wm831x_dcdc *dcdc = rdev_get_drvdata(rdev); wm831x_buckv_set_dvs()
254 static int wm831x_buckv_set_voltage_sel(struct regulator_dev *rdev, wm831x_buckv_set_voltage_sel() argument
257 struct wm831x_dcdc *dcdc = rdev_get_drvdata(rdev); wm831x_buckv_set_voltage_sel()
265 return wm831x_buckv_set_dvs(rdev, 0); wm831x_buckv_set_voltage_sel()
268 return wm831x_buckv_set_dvs(rdev, 1); wm831x_buckv_set_voltage_sel()
280 ret = wm831x_buckv_set_dvs(rdev, 0); wm831x_buckv_set_voltage_sel()
304 static int wm831x_buckv_set_suspend_voltage(struct regulator_dev *rdev, wm831x_buckv_set_suspend_voltage() argument
307 struct wm831x_dcdc *dcdc = rdev_get_drvdata(rdev); wm831x_buckv_set_suspend_voltage()
312 vsel = wm831x_buckv_map_voltage(rdev, uV, uV); wm831x_buckv_set_suspend_voltage()
319 static int wm831x_buckv_get_voltage_sel(struct regulator_dev *rdev) wm831x_buckv_get_voltage_sel() argument
321 struct wm831x_dcdc *dcdc = rdev_get_drvdata(rdev); wm831x_buckv_get_voltage_sel()
334 static int wm831x_buckv_set_current_limit(struct regulator_dev *rdev, wm831x_buckv_set_current_limit() argument
337 struct wm831x_dcdc *dcdc = rdev_get_drvdata(rdev); wm831x_buckv_set_current_limit()
353 static int wm831x_buckv_get_current_limit(struct regulator_dev *rdev) wm831x_buckv_get_current_limit() argument
355 struct wm831x_dcdc *dcdc = rdev_get_drvdata(rdev); wm831x_buckv_get_current_limit()
574 static int wm831x_buckp_set_suspend_voltage(struct regulator_dev *rdev, int uV) wm831x_buckp_set_suspend_voltage() argument
576 struct wm831x_dcdc *dcdc = rdev_get_drvdata(rdev); wm831x_buckp_set_suspend_voltage()
581 sel = regulator_map_voltage_linear(rdev, uV, uV); wm831x_buckp_set_suspend_voltage()
701 static int wm831x_boostp_get_status(struct regulator_dev *rdev) wm831x_boostp_get_status() argument
703 struct wm831x_dcdc *dcdc = rdev_get_drvdata(rdev); wm831x_boostp_get_status()
712 if (ret & (1 << rdev_get_id(rdev))) { wm831x_boostp_get_status()
714 rdev_get_id(rdev) + 1); wm831x_boostp_get_status()
722 if (ret & (1 << rdev_get_id(rdev))) wm831x_boostp_get_status()
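wm831x_dcdc_get_status() above decodes one status bit per regulator, indexed by rdev_get_id(). A stripped-down sketch of that decode; DEMO_STATUS_REG is invented:

    #define DEMO_STATUS_REG 0x40    /* invented: one enable bit per regulator */

    static int demo_get_status(struct regulator_dev *rdev)
    {
        unsigned int val;
        int ret;

        ret = regmap_read(rdev->regmap, DEMO_STATUS_REG, &val);
        if (ret < 0)
            return ret;

        if (!(val & BIT(rdev_get_id(rdev))))
            return REGULATOR_STATUS_OFF;

        return REGULATOR_STATUS_ON;
    }

The same indexing explains the `WM831X_DC1_OV_STS << rdev_get_id(rdev)` tests above: the fault bits are laid out in ID order, so one shift selects the right regulator.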
H A Dwm831x-ldo.c70 static int wm831x_gp_ldo_set_suspend_voltage(struct regulator_dev *rdev, wm831x_gp_ldo_set_suspend_voltage() argument
73 struct wm831x_ldo *ldo = rdev_get_drvdata(rdev); wm831x_gp_ldo_set_suspend_voltage()
77 sel = regulator_map_voltage_linear_range(rdev, uV, uV); wm831x_gp_ldo_set_suspend_voltage()
84 static unsigned int wm831x_gp_ldo_get_mode(struct regulator_dev *rdev) wm831x_gp_ldo_get_mode() argument
86 struct wm831x_ldo *ldo = rdev_get_drvdata(rdev); wm831x_gp_ldo_get_mode()
109 static int wm831x_gp_ldo_set_mode(struct regulator_dev *rdev, wm831x_gp_ldo_set_mode() argument
112 struct wm831x_ldo *ldo = rdev_get_drvdata(rdev); wm831x_gp_ldo_set_mode()
161 static int wm831x_gp_ldo_get_status(struct regulator_dev *rdev) wm831x_gp_ldo_get_status() argument
163 struct wm831x_ldo *ldo = rdev_get_drvdata(rdev); wm831x_gp_ldo_get_status()
165 int mask = 1 << rdev_get_id(rdev); wm831x_gp_ldo_get_status()
182 ret = wm831x_gp_ldo_get_mode(rdev); wm831x_gp_ldo_get_status()
189 static unsigned int wm831x_gp_ldo_get_optimum_mode(struct regulator_dev *rdev, wm831x_gp_ldo_get_optimum_mode() argument
323 static int wm831x_aldo_set_suspend_voltage(struct regulator_dev *rdev, wm831x_aldo_set_suspend_voltage() argument
326 struct wm831x_ldo *ldo = rdev_get_drvdata(rdev); wm831x_aldo_set_suspend_voltage()
330 sel = regulator_map_voltage_linear_range(rdev, uV, uV); wm831x_aldo_set_suspend_voltage()
337 static unsigned int wm831x_aldo_get_mode(struct regulator_dev *rdev) wm831x_aldo_get_mode() argument
339 struct wm831x_ldo *ldo = rdev_get_drvdata(rdev); wm831x_aldo_get_mode()
354 static int wm831x_aldo_set_mode(struct regulator_dev *rdev, wm831x_aldo_set_mode() argument
357 struct wm831x_ldo *ldo = rdev_get_drvdata(rdev); wm831x_aldo_set_mode()
384 static int wm831x_aldo_get_status(struct regulator_dev *rdev) wm831x_aldo_get_status() argument
386 struct wm831x_ldo *ldo = rdev_get_drvdata(rdev); wm831x_aldo_get_status()
388 int mask = 1 << rdev_get_id(rdev); wm831x_aldo_get_status()
405 ret = wm831x_aldo_get_mode(rdev); wm831x_aldo_get_status()
529 static int wm831x_alive_ldo_set_suspend_voltage(struct regulator_dev *rdev, wm831x_alive_ldo_set_suspend_voltage() argument
532 struct wm831x_ldo *ldo = rdev_get_drvdata(rdev); wm831x_alive_ldo_set_suspend_voltage()
536 sel = regulator_map_voltage_linear(rdev, uV, uV); wm831x_alive_ldo_set_suspend_voltage()
543 static int wm831x_alive_ldo_get_status(struct regulator_dev *rdev) wm831x_alive_ldo_get_status() argument
545 struct wm831x_ldo *ldo = rdev_get_drvdata(rdev); wm831x_alive_ldo_get_status()
547 int mask = 1 << rdev_get_id(rdev); wm831x_alive_ldo_get_status()
H A Darizona-ldo1.c40 static int arizona_ldo1_hc_list_voltage(struct regulator_dev *rdev, arizona_ldo1_hc_list_voltage() argument
43 if (selector >= rdev->desc->n_voltages) arizona_ldo1_hc_list_voltage()
46 if (selector == rdev->desc->n_voltages - 1) arizona_ldo1_hc_list_voltage()
49 return rdev->desc->min_uV + (rdev->desc->uV_step * selector); arizona_ldo1_hc_list_voltage()
52 static int arizona_ldo1_hc_map_voltage(struct regulator_dev *rdev, arizona_ldo1_hc_map_voltage() argument
57 sel = DIV_ROUND_UP(min_uV - rdev->desc->min_uV, rdev->desc->uV_step); arizona_ldo1_hc_map_voltage()
58 if (sel >= rdev->desc->n_voltages) arizona_ldo1_hc_map_voltage()
59 sel = rdev->desc->n_voltages - 1; arizona_ldo1_hc_map_voltage()
64 static int arizona_ldo1_hc_set_voltage_sel(struct regulator_dev *rdev, arizona_ldo1_hc_set_voltage_sel() argument
67 struct arizona_ldo1 *ldo = rdev_get_drvdata(rdev); arizona_ldo1_hc_set_voltage_sel()
72 if (sel == rdev->desc->n_voltages - 1) arizona_ldo1_hc_set_voltage_sel()
91 static int arizona_ldo1_hc_get_voltage_sel(struct regulator_dev *rdev) arizona_ldo1_hc_get_voltage_sel() argument
93 struct arizona_ldo1 *ldo = rdev_get_drvdata(rdev); arizona_ldo1_hc_get_voltage_sel()
103 return rdev->desc->n_voltages - 1; arizona_ldo1_hc_get_voltage_sel()
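arizona-ldo1 reserves its top selector for an out-of-band "high current" voltage, so both list_voltage and map_voltage special-case n_voltages - 1. A sketch of that pair under the same assumption (the 1.8 V figure is invented):

    #define DEMO_TOP_UV 1800000     /* invented out-of-band top voltage */

    static int demo_list_voltage(struct regulator_dev *rdev, unsigned int sel)
    {
        if (sel >= rdev->desc->n_voltages)
            return -EINVAL;

        if (sel == rdev->desc->n_voltages - 1)
            return DEMO_TOP_UV;     /* special top step */

        return rdev->desc->min_uV + rdev->desc->uV_step * sel;
    }

    static int demo_map_voltage(struct regulator_dev *rdev, int min_uV,
                                int max_uV)
    {
        unsigned int sel;

        if (min_uV < rdev->desc->min_uV)
            min_uV = rdev->desc->min_uV;

        sel = DIV_ROUND_UP(min_uV - rdev->desc->min_uV, rdev->desc->uV_step);
        if (sel >= rdev->desc->n_voltages)
            sel = rdev->desc->n_voltages - 1;

        if (demo_list_voltage(rdev, sel) > max_uV)
            return -EINVAL;

        return sel;
    }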
H A Dmax8997.c139 static int max8997_list_voltage_charger_cv(struct regulator_dev *rdev, max8997_list_voltage_charger_cv() argument
142 int rid = rdev_get_id(rdev); max8997_list_voltage_charger_cv()
161 static int max8997_list_voltage(struct regulator_dev *rdev, max8997_list_voltage() argument
165 int rid = rdev_get_id(rdev); max8997_list_voltage()
183 static int max8997_get_enable_register(struct regulator_dev *rdev, max8997_get_enable_register() argument
186 int rid = rdev_get_id(rdev); max8997_get_enable_register()
257 static int max8997_reg_is_enabled(struct regulator_dev *rdev) max8997_reg_is_enabled() argument
259 struct max8997_data *max8997 = rdev_get_drvdata(rdev); max8997_reg_is_enabled()
264 ret = max8997_get_enable_register(rdev, &reg, &mask, &pattern); max8997_reg_is_enabled()
275 static int max8997_reg_enable(struct regulator_dev *rdev) max8997_reg_enable() argument
277 struct max8997_data *max8997 = rdev_get_drvdata(rdev); max8997_reg_enable()
281 ret = max8997_get_enable_register(rdev, &reg, &mask, &pattern); max8997_reg_enable()
288 static int max8997_reg_disable(struct regulator_dev *rdev) max8997_reg_disable() argument
290 struct max8997_data *max8997 = rdev_get_drvdata(rdev); max8997_reg_disable()
294 ret = max8997_get_enable_register(rdev, &reg, &mask, &pattern); max8997_reg_disable()
301 static int max8997_get_voltage_register(struct regulator_dev *rdev, max8997_get_voltage_register() argument
304 struct max8997_data *max8997 = rdev_get_drvdata(rdev); max8997_get_voltage_register()
305 int rid = rdev_get_id(rdev); max8997_get_voltage_register()
367 static int max8997_get_voltage_sel(struct regulator_dev *rdev) max8997_get_voltage_sel() argument
369 struct max8997_data *max8997 = rdev_get_drvdata(rdev); max8997_get_voltage_sel()
374 ret = max8997_get_voltage_register(rdev, &reg, &shift, &mask); max8997_get_voltage_sel()
411 static int max8997_set_voltage_charger_cv(struct regulator_dev *rdev, max8997_set_voltage_charger_cv() argument
414 struct max8997_data *max8997 = rdev_get_drvdata(rdev); max8997_set_voltage_charger_cv()
416 int rid = rdev_get_id(rdev); max8997_set_voltage_charger_cv()
424 ret = max8997_get_voltage_register(rdev, &reg, &shift, &mask); max8997_set_voltage_charger_cv()
466 static int max8997_set_voltage_ldobuck(struct regulator_dev *rdev, max8997_set_voltage_ldobuck() argument
469 struct max8997_data *max8997 = rdev_get_drvdata(rdev); max8997_set_voltage_ldobuck()
472 int rid = rdev_get_id(rdev); max8997_set_voltage_ldobuck()
498 ret = max8997_get_voltage_register(rdev, &reg, &shift, &mask); max8997_set_voltage_ldobuck()
508 static int max8997_set_voltage_buck_time_sel(struct regulator_dev *rdev, max8997_set_voltage_buck_time_sel() argument
512 struct max8997_data *max8997 = rdev_get_drvdata(rdev); max8997_set_voltage_buck_time_sel()
513 int rid = rdev_get_id(rdev); max8997_set_voltage_buck_time_sel()
556 static int max8997_assess_side_effect(struct regulator_dev *rdev, max8997_assess_side_effect() argument
559 struct max8997_data *max8997 = rdev_get_drvdata(rdev); max8997_assess_side_effect()
560 int rid = rdev_get_id(rdev); max8997_assess_side_effect()
633 static int max8997_set_voltage_buck(struct regulator_dev *rdev, max8997_set_voltage_buck() argument
636 struct max8997_data *max8997 = rdev_get_drvdata(rdev); max8997_set_voltage_buck()
637 int rid = rdev_get_id(rdev); max8997_set_voltage_buck()
661 return max8997_set_voltage_ldobuck(rdev, min_uV, max_uV, max8997_set_voltage_buck()
673 damage = max8997_assess_side_effect(rdev, new_val, &new_idx); max8997_set_voltage_buck()
692 dev_warn(&rdev->dev, max8997_set_voltage_buck()
708 static int max8997_set_voltage_safeout_sel(struct regulator_dev *rdev, max8997_set_voltage_safeout_sel() argument
711 struct max8997_data *max8997 = rdev_get_drvdata(rdev); max8997_set_voltage_safeout_sel()
713 int rid = rdev_get_id(rdev); max8997_set_voltage_safeout_sel()
719 ret = max8997_get_voltage_register(rdev, &reg, &shift, &mask); max8997_set_voltage_safeout_sel()
726 static int max8997_reg_disable_suspend(struct regulator_dev *rdev) max8997_reg_disable_suspend() argument
728 struct max8997_data *max8997 = rdev_get_drvdata(rdev); max8997_reg_disable_suspend()
731 int rid = rdev_get_id(rdev); max8997_reg_disable_suspend()
733 ret = max8997_get_enable_register(rdev, &reg, &mask, &pattern); max8997_reg_disable_suspend()
742 dev_dbg(&rdev->dev, "Conditional Power-Off for %s\n", max8997_reg_disable_suspend()
743 rdev->desc->name); max8997_reg_disable_suspend()
747 dev_dbg(&rdev->dev, "Full Power-Off for %s (%xh -> %xh)\n", max8997_reg_disable_suspend()
748 rdev->desc->name, max8997->saved_states[rid] & mask, max8997_reg_disable_suspend()
798 static int max8997_set_current_limit(struct regulator_dev *rdev, max8997_set_current_limit() argument
802 int rid = rdev_get_id(rdev); max8997_set_current_limit()
808 return max8997_set_voltage_ldobuck(rdev, min_uA, max_uA, &dummy); max8997_set_current_limit()
811 static int max8997_get_current_limit(struct regulator_dev *rdev) max8997_get_current_limit() argument
813 int sel, rid = rdev_get_id(rdev); max8997_get_current_limit()
818 sel = max8997_get_voltage_sel(rdev); max8997_get_current_limit()
823 return max8997_list_voltage(rdev, sel); max8997_get_current_limit()
1032 struct regulator_dev *rdev; max8997_pmic_probe() local
1201 rdev = devm_regulator_register(&pdev->dev, &regulators[id], max8997_pmic_probe()
1203 if (IS_ERR(rdev)) { max8997_pmic_probe()
1206 return PTR_ERR(rdev); max8997_pmic_probe()
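max8997_get_enable_register() above resolves each regulator ID to a (register, mask, pattern) triple before touching hardware; enable, disable, and is_enabled then share that lookup. A regmap-based sketch of the same table-driven shape (the table contents are invented; the real driver uses max8997's own I2C helpers):

    struct demo_enable_desc {
        unsigned int reg;
        unsigned int mask;
        unsigned int pattern;   /* field value that means "enabled" */
    };

    /* Indexed by rdev_get_id(); values invented for illustration. */
    static const struct demo_enable_desc demo_enable_tbl[] = {
        { .reg = 0x10, .mask = 0xc0, .pattern = 0xc0 },
        { .reg = 0x11, .mask = 0xc0, .pattern = 0x40 },
    };

    static int demo_reg_enable(struct regulator_dev *rdev)
    {
        const struct demo_enable_desc *d = &demo_enable_tbl[rdev_get_id(rdev)];

        return regmap_update_bits(rdev->regmap, d->reg, d->mask, d->pattern);
    }

    static int demo_reg_is_enabled(struct regulator_dev *rdev)
    {
        const struct demo_enable_desc *d = &demo_enable_tbl[rdev_get_id(rdev)];
        unsigned int val;
        int ret;

        ret = regmap_read(rdev->regmap, d->reg, &val);
        if (ret < 0)
            return ret;

        return (val & d->mask) == d->pattern;
    }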
H A Dqcom_smd-regulator.c59 static int rpm_reg_enable(struct regulator_dev *rdev) rpm_reg_enable() argument
61 struct qcom_rpm_reg *vreg = rdev_get_drvdata(rdev); rpm_reg_enable()
76 static int rpm_reg_is_enabled(struct regulator_dev *rdev) rpm_reg_is_enabled() argument
78 struct qcom_rpm_reg *vreg = rdev_get_drvdata(rdev); rpm_reg_is_enabled()
83 static int rpm_reg_disable(struct regulator_dev *rdev) rpm_reg_disable() argument
85 struct qcom_rpm_reg *vreg = rdev_get_drvdata(rdev); rpm_reg_disable()
100 static int rpm_reg_get_voltage(struct regulator_dev *rdev) rpm_reg_get_voltage() argument
102 struct qcom_rpm_reg *vreg = rdev_get_drvdata(rdev); rpm_reg_get_voltage()
107 static int rpm_reg_set_voltage(struct regulator_dev *rdev, rpm_reg_set_voltage() argument
112 struct qcom_rpm_reg *vreg = rdev_get_drvdata(rdev); rpm_reg_set_voltage()
127 static int rpm_reg_set_load(struct regulator_dev *rdev, int load_uA) rpm_reg_set_load() argument
129 struct qcom_rpm_reg *vreg = rdev_get_drvdata(rdev); rpm_reg_set_load()
287 struct regulator_dev *rdev; rpm_reg_probe() local
319 rdev = devm_regulator_register(&pdev->dev, &vreg->desc, &config); rpm_reg_probe()
320 if (IS_ERR(rdev)) { rpm_reg_probe()
322 return PTR_ERR(rdev); rpm_reg_probe()
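rpm_reg_set_voltage() above caches the requested voltage and only pushes it to the RPM firmware while the supply is enabled. A sketch of that lazy-commit idea; demo_rpm_send() is a stand-in for the firmware write, not the real transport call:

    struct demo_rpm_reg {
        int uV;
        bool enabled;
    };

    static int demo_rpm_send(struct demo_rpm_reg *vreg)
    {
        /* Stand-in for the firmware request; always succeeds here. */
        return 0;
    }

    static int demo_rpm_set_voltage(struct regulator_dev *rdev,
                                    int min_uV, int max_uV,
                                    unsigned *selector)
    {
        struct demo_rpm_reg *vreg = rdev_get_drvdata(rdev);
        int old_uV = vreg->uV;
        int ret;

        vreg->uV = min_uV;
        if (!vreg->enabled)
            return 0;           /* committed later, on enable */

        ret = demo_rpm_send(vreg);
        if (ret)
            vreg->uV = old_uV;  /* keep the cache honest on failure */

        return ret;
    }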
H A Dstw481x-vmmc.c59 struct regulator_dev *rdev; stw481x_vmmc_regulator_probe() local
79 rdev = devm_regulator_register(&pdev->dev, &vmmc_regulator, &config); stw481x_vmmc_regulator_probe()
80 if (IS_ERR(rdev)) { stw481x_vmmc_regulator_probe()
83 return PTR_ERR(rdev); stw481x_vmmc_regulator_probe()
H A Dda9055-regulator.c81 struct regulator_dev *rdev; member in struct:da9055_regulator
85 static unsigned int da9055_buck_get_mode(struct regulator_dev *rdev) da9055_buck_get_mode() argument
87 struct da9055_regulator *regulator = rdev_get_drvdata(rdev); da9055_buck_get_mode()
110 static int da9055_buck_set_mode(struct regulator_dev *rdev, da9055_buck_set_mode() argument
113 struct da9055_regulator *regulator = rdev_get_drvdata(rdev); da9055_buck_set_mode()
133 static unsigned int da9055_ldo_get_mode(struct regulator_dev *rdev) da9055_ldo_get_mode() argument
135 struct da9055_regulator *regulator = rdev_get_drvdata(rdev); da9055_ldo_get_mode()
149 static int da9055_ldo_set_mode(struct regulator_dev *rdev, unsigned int mode) da9055_ldo_set_mode() argument
151 struct da9055_regulator *regulator = rdev_get_drvdata(rdev); da9055_ldo_set_mode()
171 static int da9055_buck_get_current_limit(struct regulator_dev *rdev) da9055_buck_get_current_limit() argument
173 struct da9055_regulator *regulator = rdev_get_drvdata(rdev); da9055_buck_get_current_limit()
185 static int da9055_buck_set_current_limit(struct regulator_dev *rdev, int min_uA, da9055_buck_set_current_limit() argument
188 struct da9055_regulator *regulator = rdev_get_drvdata(rdev); da9055_buck_set_current_limit()
204 static int da9055_regulator_get_voltage_sel(struct regulator_dev *rdev) da9055_regulator_get_voltage_sel() argument
206 struct da9055_regulator *regulator = rdev_get_drvdata(rdev); da9055_regulator_get_voltage_sel()
235 static int da9055_regulator_set_voltage_sel(struct regulator_dev *rdev, da9055_regulator_set_voltage_sel() argument
238 struct da9055_regulator *regulator = rdev_get_drvdata(rdev); da9055_regulator_set_voltage_sel()
278 static int da9055_regulator_set_suspend_voltage(struct regulator_dev *rdev, da9055_regulator_set_suspend_voltage() argument
281 struct da9055_regulator *regulator = rdev_get_drvdata(rdev); da9055_regulator_set_suspend_voltage()
293 ret = regulator_map_voltage_linear(rdev, uV, uV); da9055_regulator_set_suspend_voltage()
301 static int da9055_suspend_enable(struct regulator_dev *rdev) da9055_suspend_enable() argument
303 struct da9055_regulator *regulator = rdev_get_drvdata(rdev); da9055_suspend_enable()
314 static int da9055_suspend_disable(struct regulator_dev *rdev) da9055_suspend_disable() argument
316 struct da9055_regulator *regulator = rdev_get_drvdata(rdev); da9055_suspend_disable()
518 regulator_notifier_call_chain(regulator->rdev, da9055_ldo5_6_oc_irq()
628 regulator->rdev = devm_regulator_register(&pdev->dev, da9055_regulator_probe()
631 if (IS_ERR(regulator->rdev)) { da9055_regulator_probe()
634 return PTR_ERR(regulator->rdev); da9055_regulator_probe()
H A Dtwl-regulator.c142 static int twlreg_grp(struct regulator_dev *rdev) twlreg_grp() argument
144 return twlreg_read(rdev_get_drvdata(rdev), TWL_MODULE_PM_RECEIVER, twlreg_grp()
161 static int twl4030reg_is_enabled(struct regulator_dev *rdev) twl4030reg_is_enabled() argument
163 int state = twlreg_grp(rdev); twl4030reg_is_enabled()
171 static int twl6030reg_is_enabled(struct regulator_dev *rdev) twl6030reg_is_enabled() argument
173 struct twlreg_info *info = rdev_get_drvdata(rdev); twl6030reg_is_enabled()
177 grp = twlreg_grp(rdev); twl6030reg_is_enabled()
191 static int twl4030reg_enable(struct regulator_dev *rdev) twl4030reg_enable() argument
193 struct twlreg_info *info = rdev_get_drvdata(rdev); twl4030reg_enable()
197 grp = twlreg_grp(rdev); twl4030reg_enable()
208 static int twl6030reg_enable(struct regulator_dev *rdev) twl6030reg_enable() argument
210 struct twlreg_info *info = rdev_get_drvdata(rdev); twl6030reg_enable()
215 grp = twlreg_grp(rdev); twl6030reg_enable()
225 static int twl4030reg_disable(struct regulator_dev *rdev) twl4030reg_disable() argument
227 struct twlreg_info *info = rdev_get_drvdata(rdev); twl4030reg_disable()
231 grp = twlreg_grp(rdev); twl4030reg_disable()
242 static int twl6030reg_disable(struct regulator_dev *rdev) twl6030reg_disable() argument
244 struct twlreg_info *info = rdev_get_drvdata(rdev); twl6030reg_disable()
259 static int twl4030reg_get_status(struct regulator_dev *rdev) twl4030reg_get_status() argument
261 int state = twlreg_grp(rdev); twl4030reg_get_status()
275 static int twl6030reg_get_status(struct regulator_dev *rdev) twl6030reg_get_status() argument
277 struct twlreg_info *info = rdev_get_drvdata(rdev); twl6030reg_get_status()
280 val = twlreg_grp(rdev); twl6030reg_get_status()
302 static int twl4030reg_set_mode(struct regulator_dev *rdev, unsigned mode) twl4030reg_set_mode() argument
304 struct twlreg_info *info = rdev_get_drvdata(rdev); twl4030reg_set_mode()
321 status = twlreg_grp(rdev); twl4030reg_set_mode()
336 static int twl6030reg_set_mode(struct regulator_dev *rdev, unsigned mode) twl6030reg_set_mode() argument
338 struct twlreg_info *info = rdev_get_drvdata(rdev); twl6030reg_set_mode()
343 grp = twlreg_grp(rdev); twl6030reg_set_mode()
451 static int twl4030ldo_list_voltage(struct regulator_dev *rdev, unsigned index) twl4030ldo_list_voltage() argument
453 struct twlreg_info *info = rdev_get_drvdata(rdev); twl4030ldo_list_voltage()
460 twl4030ldo_set_voltage_sel(struct regulator_dev *rdev, unsigned selector) twl4030ldo_set_voltage_sel() argument
462 struct twlreg_info *info = rdev_get_drvdata(rdev); twl4030ldo_set_voltage_sel()
468 static int twl4030ldo_get_voltage_sel(struct regulator_dev *rdev) twl4030ldo_get_voltage_sel() argument
470 struct twlreg_info *info = rdev_get_drvdata(rdev); twl4030ldo_get_voltage_sel()
496 twl4030smps_set_voltage(struct regulator_dev *rdev, int min_uV, int max_uV, twl4030smps_set_voltage() argument
499 struct twlreg_info *info = rdev_get_drvdata(rdev); twl4030smps_set_voltage()
512 static int twl4030smps_get_voltage(struct regulator_dev *rdev) twl4030smps_get_voltage() argument
514 struct twlreg_info *info = rdev_get_drvdata(rdev); twl4030smps_get_voltage()
531 static int twl6030coresmps_set_voltage(struct regulator_dev *rdev, int min_uV, twl6030coresmps_set_voltage() argument
534 struct twlreg_info *info = rdev_get_drvdata(rdev); twl6030coresmps_set_voltage()
542 static int twl6030coresmps_get_voltage(struct regulator_dev *rdev) twl6030coresmps_get_voltage() argument
544 struct twlreg_info *info = rdev_get_drvdata(rdev); twl6030coresmps_get_voltage()
557 static int twl6030ldo_list_voltage(struct regulator_dev *rdev, unsigned sel) twl6030ldo_list_voltage() argument
559 struct twlreg_info *info = rdev_get_drvdata(rdev); twl6030ldo_list_voltage()
579 twl6030ldo_set_voltage_sel(struct regulator_dev *rdev, unsigned selector) twl6030ldo_set_voltage_sel() argument
581 struct twlreg_info *info = rdev_get_drvdata(rdev); twl6030ldo_set_voltage_sel()
587 static int twl6030ldo_get_voltage_sel(struct regulator_dev *rdev) twl6030ldo_get_voltage_sel() argument
589 struct twlreg_info *info = rdev_get_drvdata(rdev); twl6030ldo_get_voltage_sel()
640 static int twl6030smps_list_voltage(struct regulator_dev *rdev, unsigned index) twl6030smps_list_voltage() argument
642 struct twlreg_info *info = rdev_get_drvdata(rdev); twl6030smps_list_voltage()
727 static int twl6030smps_map_voltage(struct regulator_dev *rdev, int min_uV, twl6030smps_map_voltage() argument
730 struct twlreg_info *info = rdev_get_drvdata(rdev); twl6030smps_map_voltage()
801 static int twl6030smps_set_voltage_sel(struct regulator_dev *rdev, twl6030smps_set_voltage_sel() argument
804 struct twlreg_info *info = rdev_get_drvdata(rdev); twl6030smps_set_voltage_sel()
810 static int twl6030smps_get_voltage_sel(struct regulator_dev *rdev) twl6030smps_get_voltage_sel() argument
812 struct twlreg_info *info = rdev_get_drvdata(rdev); twl6030smps_get_voltage_sel()
1097 struct regulator_dev *rdev; twlreg_probe() local
1192 rdev = devm_regulator_register(&pdev->dev, &info->desc, &config); twlreg_probe()
1193 if (IS_ERR(rdev)) { twlreg_probe()
1195 info->desc.name, PTR_ERR(rdev)); twlreg_probe()
1196 return PTR_ERR(rdev); twlreg_probe()
1198 platform_set_drvdata(pdev, rdev); twlreg_probe()
H A Dwm831x-isink.c38 static int wm831x_isink_enable(struct regulator_dev *rdev) wm831x_isink_enable() argument
40 struct wm831x_isink *isink = rdev_get_drvdata(rdev); wm831x_isink_enable()
60 static int wm831x_isink_disable(struct regulator_dev *rdev) wm831x_isink_disable() argument
62 struct wm831x_isink *isink = rdev_get_drvdata(rdev); wm831x_isink_disable()
78 static int wm831x_isink_is_enabled(struct regulator_dev *rdev) wm831x_isink_is_enabled() argument
80 struct wm831x_isink *isink = rdev_get_drvdata(rdev); wm831x_isink_is_enabled()
95 static int wm831x_isink_set_current(struct regulator_dev *rdev, wm831x_isink_set_current() argument
98 struct wm831x_isink *isink = rdev_get_drvdata(rdev); wm831x_isink_set_current()
114 static int wm831x_isink_get_current(struct regulator_dev *rdev) wm831x_isink_get_current() argument
116 struct wm831x_isink *isink = rdev_get_drvdata(rdev); wm831x_isink_get_current()
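The ISINK excerpt programs a current sink whose register field indexes a table of discrete currents. A sketch of that selection, choosing the first table entry inside the requested window (table, register, and mask are invented):

    static const unsigned int demo_isink_ua[] = {
        5000, 10000, 20000, 40000,      /* invented steps, in uA */
    };

    #define DEMO_ISINK_REG  0x4a        /* invented */
    #define DEMO_ISINK_MASK 0x03

    static int demo_isink_set_current(struct regulator_dev *rdev,
                                      int min_uA, int max_uA)
    {
        int i;

        for (i = 0; i < ARRAY_SIZE(demo_isink_ua); i++) {
            if (demo_isink_ua[i] >= min_uA && demo_isink_ua[i] <= max_uA)
                return regmap_update_bits(rdev->regmap, DEMO_ISINK_REG,
                                          DEMO_ISINK_MASK, i);
        }

        return -EINVAL;
    }

Ascending table order means the loop naturally picks the lowest current that satisfies the request.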
H A Dlp8788-buck.c262 static int lp8788_buck12_set_voltage_sel(struct regulator_dev *rdev, lp8788_buck12_set_voltage_sel() argument
265 struct lp8788_buck *buck = rdev_get_drvdata(rdev); lp8788_buck12_set_voltage_sel()
266 enum lp8788_buck_id id = rdev_get_id(rdev); lp8788_buck12_set_voltage_sel()
279 static int lp8788_buck12_get_voltage_sel(struct regulator_dev *rdev) lp8788_buck12_get_voltage_sel() argument
281 struct lp8788_buck *buck = rdev_get_drvdata(rdev); lp8788_buck12_get_voltage_sel()
282 enum lp8788_buck_id id = rdev_get_id(rdev); lp8788_buck12_get_voltage_sel()
297 static int lp8788_buck_enable_time(struct regulator_dev *rdev) lp8788_buck_enable_time() argument
299 struct lp8788_buck *buck = rdev_get_drvdata(rdev); lp8788_buck_enable_time()
300 enum lp8788_buck_id id = rdev_get_id(rdev); lp8788_buck_enable_time()
311 static int lp8788_buck_set_mode(struct regulator_dev *rdev, unsigned int mode) lp8788_buck_set_mode() argument
313 struct lp8788_buck *buck = rdev_get_drvdata(rdev); lp8788_buck_set_mode()
314 enum lp8788_buck_id id = rdev_get_id(rdev); lp8788_buck_set_mode()
332 static unsigned int lp8788_buck_get_mode(struct regulator_dev *rdev) lp8788_buck_get_mode() argument
334 struct lp8788_buck *buck = rdev_get_drvdata(rdev); lp8788_buck_get_mode()
335 enum lp8788_buck_id id = rdev_get_id(rdev); lp8788_buck_get_mode()
497 struct regulator_dev *rdev; lp8788_buck_probe() local
518 rdev = devm_regulator_register(&pdev->dev, &lp8788_buck_desc[id], &cfg); lp8788_buck_probe()
519 if (IS_ERR(rdev)) { lp8788_buck_probe()
520 ret = PTR_ERR(rdev); lp8788_buck_probe()
526 buck->regulator = rdev; lp8788_buck_probe()
H A Dpcf50633-regulator.c81 struct regulator_dev *rdev; pcf50633_regulator_probe() local
93 rdev = devm_regulator_register(&pdev->dev, &regulators[pdev->id], pcf50633_regulator_probe()
95 if (IS_ERR(rdev)) pcf50633_regulator_probe()
96 return PTR_ERR(rdev); pcf50633_regulator_probe()
98 platform_set_drvdata(pdev, rdev); pcf50633_regulator_probe()
H A Dtps6524x-regulator.c140 struct regulator_dev *rdev[N_REGULATORS]; member in struct:tps6524x
457 static int set_voltage_sel(struct regulator_dev *rdev, unsigned selector) set_voltage_sel() argument
462 hw = rdev_get_drvdata(rdev); set_voltage_sel()
463 info = &supply_info[rdev_get_id(rdev)]; set_voltage_sel()
465 if (rdev->desc->n_voltages == 1) set_voltage_sel()
471 static int get_voltage_sel(struct regulator_dev *rdev) get_voltage_sel() argument
477 hw = rdev_get_drvdata(rdev); get_voltage_sel()
478 info = &supply_info[rdev_get_id(rdev)]; get_voltage_sel()
480 if (rdev->desc->n_voltages == 1) get_voltage_sel()
492 static int set_current_limit(struct regulator_dev *rdev, int min_uA, set_current_limit() argument
499 hw = rdev_get_drvdata(rdev); set_current_limit()
500 info = &supply_info[rdev_get_id(rdev)]; set_current_limit()
514 static int get_current_limit(struct regulator_dev *rdev) get_current_limit() argument
520 hw = rdev_get_drvdata(rdev); get_current_limit()
521 info = &supply_info[rdev_get_id(rdev)]; get_current_limit()
535 static int enable_supply(struct regulator_dev *rdev) enable_supply() argument
540 hw = rdev_get_drvdata(rdev); enable_supply()
541 info = &supply_info[rdev_get_id(rdev)]; enable_supply()
546 static int disable_supply(struct regulator_dev *rdev) disable_supply() argument
551 hw = rdev_get_drvdata(rdev); disable_supply()
552 info = &supply_info[rdev_get_id(rdev)]; disable_supply()
557 static int is_supply_enabled(struct regulator_dev *rdev) is_supply_enabled() argument
562 hw = rdev_get_drvdata(rdev); is_supply_enabled()
563 info = &supply_info[rdev_get_id(rdev)]; is_supply_enabled()
619 hw->rdev[i] = devm_regulator_register(dev, &hw->desc[i], pmic_probe()
621 if (IS_ERR(hw->rdev[i])) pmic_probe()
622 return PTR_ERR(hw->rdev[i]); pmic_probe()
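tps6524x routes every op through one static supply_info[] array keyed by rdev_get_id(), so a single ops table serves all N_REGULATORS. A sketch of that descriptor-array pattern (entries invented):

    struct demo_supply_info {
        const char *name;
        unsigned int enable_reg;
        unsigned int enable_bit;
    };

    /* One entry per logical regulator, indexed by rdev_get_id(). */
    static const struct demo_supply_info demo_supplies[] = {
        { .name = "dcdc1", .enable_reg = 0x20, .enable_bit = 0 },
        { .name = "ldo1",  .enable_reg = 0x21, .enable_bit = 0 },
    };

    static int demo_enable_supply(struct regulator_dev *rdev)
    {
        const struct demo_supply_info *info =
            &demo_supplies[rdev_get_id(rdev)];

        return regmap_update_bits(rdev->regmap, info->enable_reg,
                                  BIT(info->enable_bit),
                                  BIT(info->enable_bit));
    }

    static int demo_disable_supply(struct regulator_dev *rdev)
    {
        const struct demo_supply_info *info =
            &demo_supplies[rdev_get_id(rdev)];

        return regmap_update_bits(rdev->regmap, info->enable_reg,
                                  BIT(info->enable_bit), 0);
    }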
H A Dmc13892-regulator.c337 static int mc13892_gpo_regulator_enable(struct regulator_dev *rdev) mc13892_gpo_regulator_enable() argument
339 struct mc13xxx_regulator_priv *priv = rdev_get_drvdata(rdev); mc13892_gpo_regulator_enable()
340 int id = rdev_get_id(rdev); mc13892_gpo_regulator_enable()
344 dev_dbg(rdev_get_dev(rdev), "%s id: %d\n", __func__, id); mc13892_gpo_regulator_enable()
356 static int mc13892_gpo_regulator_disable(struct regulator_dev *rdev) mc13892_gpo_regulator_disable() argument
358 struct mc13xxx_regulator_priv *priv = rdev_get_drvdata(rdev); mc13892_gpo_regulator_disable()
359 int id = rdev_get_id(rdev); mc13892_gpo_regulator_disable()
362 dev_dbg(rdev_get_dev(rdev), "%s id: %d\n", __func__, id); mc13892_gpo_regulator_disable()
372 static int mc13892_gpo_regulator_is_enabled(struct regulator_dev *rdev) mc13892_gpo_regulator_is_enabled() argument
374 struct mc13xxx_regulator_priv *priv = rdev_get_drvdata(rdev); mc13892_gpo_regulator_is_enabled()
375 int ret, id = rdev_get_id(rdev); mc13892_gpo_regulator_is_enabled()
402 static int mc13892_sw_regulator_get_voltage_sel(struct regulator_dev *rdev) mc13892_sw_regulator_get_voltage_sel() argument
404 struct mc13xxx_regulator_priv *priv = rdev_get_drvdata(rdev); mc13892_sw_regulator_get_voltage_sel()
405 int ret, id = rdev_get_id(rdev); mc13892_sw_regulator_get_voltage_sel()
408 dev_dbg(rdev_get_dev(rdev), "%s id: %d\n", __func__, id); mc13892_sw_regulator_get_voltage_sel()
435 dev_dbg(rdev_get_dev(rdev), "%s id: %d val: 0x%08x selector: %d\n", mc13892_sw_regulator_get_voltage_sel()
441 static int mc13892_sw_regulator_set_voltage_sel(struct regulator_dev *rdev, mc13892_sw_regulator_set_voltage_sel() argument
444 struct mc13xxx_regulator_priv *priv = rdev_get_drvdata(rdev); mc13892_sw_regulator_set_voltage_sel()
445 int volt, mask, id = rdev_get_id(rdev); mc13892_sw_regulator_set_voltage_sel()
449 volt = rdev->desc->volt_table[selector]; mc13892_sw_regulator_set_voltage_sel()
493 static int mc13892_vcam_set_mode(struct regulator_dev *rdev, unsigned int mode) mc13892_vcam_set_mode() argument
496 struct mc13xxx_regulator_priv *priv = rdev_get_drvdata(rdev); mc13892_vcam_set_mode()
497 int ret, id = rdev_get_id(rdev); mc13892_vcam_set_mode()
510 static unsigned int mc13892_vcam_get_mode(struct regulator_dev *rdev) mc13892_vcam_get_mode() argument
512 struct mc13xxx_regulator_priv *priv = rdev_get_drvdata(rdev); mc13892_vcam_get_mode()
513 int ret, id = rdev_get_id(rdev); mc13892_vcam_get_mode()
H A Dda9062-regulator.c62 struct regulator_dev *rdev; member in struct:da9062_regulator
105 static int da9062_set_current_limit(struct regulator_dev *rdev, da9062_set_current_limit() argument
108 struct da9062_regulator *regl = rdev_get_drvdata(rdev); da9062_set_current_limit()
121 static int da9062_get_current_limit(struct regulator_dev *rdev) da9062_get_current_limit() argument
123 struct da9062_regulator *regl = rdev_get_drvdata(rdev); da9062_get_current_limit()
138 static int da9062_buck_set_mode(struct regulator_dev *rdev, unsigned mode) da9062_buck_set_mode() argument
140 struct da9062_regulator *regl = rdev_get_drvdata(rdev); da9062_buck_set_mode()
166 static unsigned da9062_buck_get_mode(struct regulator_dev *rdev) da9062_buck_get_mode() argument
168 struct da9062_regulator *regl = rdev_get_drvdata(rdev); da9062_buck_get_mode()
219 static int da9062_ldo_set_mode(struct regulator_dev *rdev, unsigned mode) da9062_ldo_set_mode() argument
221 struct da9062_regulator *regl = rdev_get_drvdata(rdev); da9062_ldo_set_mode()
238 static unsigned da9062_ldo_get_mode(struct regulator_dev *rdev) da9062_ldo_get_mode() argument
240 struct da9062_regulator *regl = rdev_get_drvdata(rdev); da9062_ldo_get_mode()
265 static int da9062_buck_get_status(struct regulator_dev *rdev) da9062_buck_get_status() argument
267 int ret = regulator_is_enabled_regmap(rdev); da9062_buck_get_status()
272 ret = da9062_buck_get_mode(rdev); da9062_buck_get_status()
282 static int da9062_ldo_get_status(struct regulator_dev *rdev) da9062_ldo_get_status() argument
284 int ret = regulator_is_enabled_regmap(rdev); da9062_ldo_get_status()
289 ret = da9062_ldo_get_mode(rdev); da9062_ldo_get_status()
299 static int da9062_set_suspend_voltage(struct regulator_dev *rdev, int uv) da9062_set_suspend_voltage() argument
301 struct da9062_regulator *regl = rdev_get_drvdata(rdev); da9062_set_suspend_voltage()
305 sel = regulator_map_voltage_linear(rdev, uv, uv); da9062_set_suspend_voltage()
309 sel <<= ffs(rdev->desc->vsel_mask) - 1; da9062_set_suspend_voltage()
312 rdev->desc->vsel_mask, sel); da9062_set_suspend_voltage()
317 static int da9062_suspend_enable(struct regulator_dev *rdev) da9062_suspend_enable() argument
319 struct da9062_regulator *regl = rdev_get_drvdata(rdev); da9062_suspend_enable()
324 static int da9062_suspend_disable(struct regulator_dev *rdev) da9062_suspend_disable() argument
326 struct da9062_regulator *regl = rdev_get_drvdata(rdev); da9062_suspend_disable()
331 static int da9062_buck_set_suspend_mode(struct regulator_dev *rdev, da9062_buck_set_suspend_mode() argument
334 struct da9062_regulator *regl = rdev_get_drvdata(rdev); da9062_buck_set_suspend_mode()
354 static int da9062_ldo_set_suspend_mode(struct regulator_dev *rdev, da9062_ldo_set_suspend_mode() argument
357 struct da9062_regulator *regl = rdev_get_drvdata(rdev); da9062_ldo_set_suspend_mode()
714 regulator_notifier_call_chain(regl->rdev, da9062_ldo_lim_event()
785 regl->rdev = devm_regulator_register(&pdev->dev, &regl->desc, da9062_regulator_probe()
787 if (IS_ERR(regl->rdev)) { da9062_regulator_probe()
791 return PTR_ERR(regl->rdev); da9062_regulator_probe()
H A Dda9063-regulator.c107 struct regulator_dev *rdev; member in struct:da9063_regulator
163 static int da9063_set_current_limit(struct regulator_dev *rdev, da9063_set_current_limit() argument
166 struct da9063_regulator *regl = rdev_get_drvdata(rdev); da9063_set_current_limit()
179 static int da9063_get_current_limit(struct regulator_dev *rdev) da9063_get_current_limit() argument
181 struct da9063_regulator *regl = rdev_get_drvdata(rdev); da9063_get_current_limit()
196 static int da9063_buck_set_mode(struct regulator_dev *rdev, unsigned mode) da9063_buck_set_mode() argument
198 struct da9063_regulator *regl = rdev_get_drvdata(rdev); da9063_buck_set_mode()
224 static unsigned da9063_buck_get_mode(struct regulator_dev *rdev) da9063_buck_get_mode() argument
226 struct da9063_regulator *regl = rdev_get_drvdata(rdev); da9063_buck_get_mode()
277 static int da9063_ldo_set_mode(struct regulator_dev *rdev, unsigned mode) da9063_ldo_set_mode() argument
279 struct da9063_regulator *regl = rdev_get_drvdata(rdev); da9063_ldo_set_mode()
296 static unsigned da9063_ldo_get_mode(struct regulator_dev *rdev) da9063_ldo_get_mode() argument
298 struct da9063_regulator *regl = rdev_get_drvdata(rdev); da9063_ldo_get_mode()
323 static int da9063_buck_get_status(struct regulator_dev *rdev) da9063_buck_get_status() argument
325 int ret = regulator_is_enabled_regmap(rdev); da9063_buck_get_status()
330 ret = da9063_buck_get_mode(rdev); da9063_buck_get_status()
340 static int da9063_ldo_get_status(struct regulator_dev *rdev) da9063_ldo_get_status() argument
342 int ret = regulator_is_enabled_regmap(rdev); da9063_ldo_get_status()
347 ret = da9063_ldo_get_mode(rdev); da9063_ldo_get_status()
357 static int da9063_set_suspend_voltage(struct regulator_dev *rdev, int uV) da9063_set_suspend_voltage() argument
359 struct da9063_regulator *regl = rdev_get_drvdata(rdev); da9063_set_suspend_voltage()
363 sel = regulator_map_voltage_linear(rdev, uV, uV); da9063_set_suspend_voltage()
367 sel <<= ffs(rdev->desc->vsel_mask) - 1; da9063_set_suspend_voltage()
370 rdev->desc->vsel_mask, sel); da9063_set_suspend_voltage()
375 static int da9063_suspend_enable(struct regulator_dev *rdev) da9063_suspend_enable() argument
377 struct da9063_regulator *regl = rdev_get_drvdata(rdev); da9063_suspend_enable()
382 static int da9063_suspend_disable(struct regulator_dev *rdev) da9063_suspend_disable() argument
384 struct da9063_regulator *regl = rdev_get_drvdata(rdev); da9063_suspend_disable()
389 static int da9063_buck_set_suspend_mode(struct regulator_dev *rdev, unsigned mode) da9063_buck_set_suspend_mode() argument
391 struct da9063_regulator *regl = rdev_get_drvdata(rdev); da9063_buck_set_suspend_mode()
411 static int da9063_ldo_set_suspend_mode(struct regulator_dev *rdev, unsigned mode) da9063_ldo_set_suspend_mode() argument
413 struct da9063_regulator *regl = rdev_get_drvdata(rdev); da9063_ldo_set_suspend_mode()
611 regulator_notifier_call_chain(regl->rdev, da9063_ldo_lim_event()
848 regl->rdev = devm_regulator_register(&pdev->dev, &regl->desc, da9063_regulator_probe()
850 if (IS_ERR(regl->rdev)) { da9063_regulator_probe()
854 return PTR_ERR(regl->rdev); da9063_regulator_probe()
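da9062 and da9063 program the suspend voltage by mapping microvolts to a selector and then shifting it into place with ffs(vsel_mask) - 1, which locates the field's lowest set bit. A sketch of that sequence; DEMO_SUSPEND_VSEL_REG stands in for the chips' separate suspend-voltage register:

    #define DEMO_SUSPEND_VSEL_REG 0x9e  /* invented */

    static int demo_set_suspend_voltage(struct regulator_dev *rdev, int uV)
    {
        int sel;

        sel = regulator_map_voltage_linear(rdev, uV, uV);
        if (sel < 0)
            return sel;

        /* ffs() is 1-based, so this shifts the selector to the
         * field's bit position within the register. */
        sel <<= ffs(rdev->desc->vsel_mask) - 1;

        return regmap_update_bits(rdev->regmap, DEMO_SUSPEND_VSEL_REG,
                                  rdev->desc->vsel_mask, sel);
    }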
H A Dhi6421-regulator.c389 static int hi6421_regulator_enable(struct regulator_dev *rdev) hi6421_regulator_enable() argument
393 pdata = dev_get_drvdata(rdev->dev.parent); hi6421_regulator_enable()
402 regulator_enable_regmap(rdev); hi6421_regulator_enable()
408 static unsigned int hi6421_regulator_ldo_get_mode(struct regulator_dev *rdev) hi6421_regulator_ldo_get_mode() argument
410 struct hi6421_regulator_info *info = rdev_get_drvdata(rdev); hi6421_regulator_ldo_get_mode()
413 regmap_read(rdev->regmap, rdev->desc->enable_reg, &reg_val); hi6421_regulator_ldo_get_mode()
420 static unsigned int hi6421_regulator_buck_get_mode(struct regulator_dev *rdev) hi6421_regulator_buck_get_mode() argument
422 struct hi6421_regulator_info *info = rdev_get_drvdata(rdev); hi6421_regulator_buck_get_mode()
425 regmap_read(rdev->regmap, rdev->desc->enable_reg, &reg_val); hi6421_regulator_buck_get_mode()
432 static int hi6421_regulator_ldo_set_mode(struct regulator_dev *rdev, hi6421_regulator_ldo_set_mode() argument
435 struct hi6421_regulator_info *info = rdev_get_drvdata(rdev); hi6421_regulator_ldo_set_mode()
450 regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, hi6421_regulator_ldo_set_mode()
456 static int hi6421_regulator_buck_set_mode(struct regulator_dev *rdev, hi6421_regulator_buck_set_mode() argument
459 struct hi6421_regulator_info *info = rdev_get_drvdata(rdev); hi6421_regulator_buck_set_mode()
474 regmap_update_bits(rdev->regmap, rdev->desc->enable_reg, hi6421_regulator_buck_set_mode()
480 unsigned int hi6421_regulator_ldo_get_optimum_mode(struct regulator_dev *rdev, hi6421_regulator_ldo_get_optimum_mode() argument
483 struct hi6421_regulator_info *info = rdev_get_drvdata(rdev); hi6421_regulator_ldo_get_optimum_mode()
561 struct regulator_dev *rdev; hi6421_regulator_register() local
573 rdev = devm_regulator_register(&pdev->dev, &info->desc, &config); hi6421_regulator_register()
574 if (IS_ERR(rdev)) { hi6421_regulator_register()
577 return PTR_ERR(rdev); hi6421_regulator_register()
H A Dad5398.c28 struct regulator_dev *rdev; member in struct:ad5398_chip_info
67 static int ad5398_get_current_limit(struct regulator_dev *rdev) ad5398_get_current_limit() argument
69 struct ad5398_chip_info *chip = rdev_get_drvdata(rdev); ad5398_get_current_limit()
83 static int ad5398_set_current_limit(struct regulator_dev *rdev, int min_uA, int max_uA) ad5398_set_current_limit() argument
85 struct ad5398_chip_info *chip = rdev_get_drvdata(rdev); ad5398_set_current_limit()
123 static int ad5398_is_enabled(struct regulator_dev *rdev) ad5398_is_enabled() argument
125 struct ad5398_chip_info *chip = rdev_get_drvdata(rdev); ad5398_is_enabled()
140 static int ad5398_enable(struct regulator_dev *rdev) ad5398_enable() argument
142 struct ad5398_chip_info *chip = rdev_get_drvdata(rdev); ad5398_enable()
161 static int ad5398_disable(struct regulator_dev *rdev) ad5398_disable() argument
163 struct ad5398_chip_info *chip = rdev_get_drvdata(rdev); ad5398_disable()
242 chip->rdev = devm_regulator_register(&client->dev, &ad5398_reg, ad5398_probe()
244 if (IS_ERR(chip->rdev)) { ad5398_probe()
247 return PTR_ERR(chip->rdev); ad5398_probe()
H A Dmax1586.c69 static int max1586_v3_get_voltage_sel(struct regulator_dev *rdev) max1586_v3_get_voltage_sel() argument
71 struct max1586_data *max1586 = rdev_get_drvdata(rdev); max1586_v3_get_voltage_sel()
76 static int max1586_v3_set_voltage_sel(struct regulator_dev *rdev, max1586_v3_set_voltage_sel() argument
79 struct max1586_data *max1586 = rdev_get_drvdata(rdev); max1586_v3_set_voltage_sel()
85 regulator_list_voltage_linear(rdev, selector) / 1000); max1586_v3_set_voltage_sel()
97 static int max1586_v6_get_voltage_sel(struct regulator_dev *rdev) max1586_v6_get_voltage_sel() argument
99 struct max1586_data *max1586 = rdev_get_drvdata(rdev); max1586_v6_get_voltage_sel()
104 static int max1586_v6_set_voltage_sel(struct regulator_dev *rdev, max1586_v6_set_voltage_sel() argument
107 struct max1586_data *max1586 = rdev_get_drvdata(rdev); max1586_v6_set_voltage_sel()
113 rdev->desc->volt_table[selector] / 1000); max1586_v6_set_voltage_sel()
262 struct regulator_dev *rdev; max1586_pmic_probe() local
283 rdev = devm_regulator_register(&client->dev, max1586_pmic_probe()
285 if (IS_ERR(rdev)) { max1586_pmic_probe()
288 return PTR_ERR(rdev); max1586_pmic_probe()
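The max1586 has no readback path, so the excerpt's get_voltage_sel returns a selector cached at the last successful write. A sketch of that write-only idiom; DEMO_OPCODE() is an invented one-byte command format, not the MAX1586's real encoding:

    #include <linux/i2c.h>

    #define DEMO_OPCODE(sel) (0x30 | ((sel) & 0x1f))    /* invented */

    struct demo_wo_reg {
        struct i2c_client *client;
        unsigned int cached_sel;
    };

    static int demo_wo_get_voltage_sel(struct regulator_dev *rdev)
    {
        struct demo_wo_reg *priv = rdev_get_drvdata(rdev);

        return priv->cached_sel;    /* hardware cannot be read back */
    }

    static int demo_wo_set_voltage_sel(struct regulator_dev *rdev,
                                       unsigned int selector)
    {
        struct demo_wo_reg *priv = rdev_get_drvdata(rdev);
        int ret;

        ret = i2c_smbus_write_byte(priv->client, DEMO_OPCODE(selector));
        if (ret < 0)
            return ret;

        priv->cached_sel = selector;    /* update cache only on success */
        return 0;
    }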
H A Dda9211-regulator.c49 struct regulator_dev *rdev[DA9211_MAX_REGULATORS]; member in struct:da9211
102 static unsigned int da9211_buck_get_mode(struct regulator_dev *rdev) da9211_buck_get_mode() argument
104 int id = rdev_get_id(rdev); da9211_buck_get_mode()
105 struct da9211 *chip = rdev_get_drvdata(rdev); da9211_buck_get_mode()
128 static int da9211_buck_set_mode(struct regulator_dev *rdev, da9211_buck_set_mode() argument
131 int id = rdev_get_id(rdev); da9211_buck_set_mode()
132 struct da9211 *chip = rdev_get_drvdata(rdev); da9211_buck_set_mode()
151 static int da9211_set_current_limit(struct regulator_dev *rdev, int min, da9211_set_current_limit() argument
154 int id = rdev_get_id(rdev); da9211_set_current_limit()
155 struct da9211 *chip = rdev_get_drvdata(rdev); da9211_set_current_limit()
189 static int da9211_get_current_limit(struct regulator_dev *rdev) da9211_get_current_limit() argument
191 int id = rdev_get_id(rdev); da9211_get_current_limit()
192 struct da9211 *chip = rdev_get_drvdata(rdev); da9211_get_current_limit()
322 regulator_notifier_call_chain(chip->rdev[0], da9211_irq_handler()
334 regulator_notifier_call_chain(chip->rdev[1], da9211_irq_handler()
392 chip->rdev[i] = devm_regulator_register(chip->dev, da9211_regulator_init()
394 if (IS_ERR(chip->rdev[i])) { da9211_regulator_init()
397 return PTR_ERR(chip->rdev[i]); da9211_regulator_init()
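Nearly every probe above follows the same bracket: fill a struct regulator_config, call devm_regulator_register(), test the result with IS_ERR(), and stash the returned rdev. A minimal self-contained sketch of that idiom (demo_ops and demo_desc are placeholders, not any driver's real descriptor):

    #include <linux/module.h>
    #include <linux/platform_device.h>
    #include <linux/regulator/driver.h>

    struct demo_priv {
        struct regulator_dev *rdev;
    };

    static const struct regulator_ops demo_ops;     /* filled in per device */

    static const struct regulator_desc demo_desc = {
        .name  = "demo",
        .id    = 0,
        .type  = REGULATOR_VOLTAGE,
        .owner = THIS_MODULE,
        .ops   = &demo_ops,
    };

    static int demo_regulator_probe(struct platform_device *pdev)
    {
        struct regulator_config config = { };
        struct demo_priv *priv;

        priv = devm_kzalloc(&pdev->dev, sizeof(*priv), GFP_KERNEL);
        if (!priv)
            return -ENOMEM;

        config.dev = &pdev->dev;
        config.driver_data = priv;      /* later rdev_get_drvdata() */

        priv->rdev = devm_regulator_register(&pdev->dev, &demo_desc,
                                             &config);
        if (IS_ERR(priv->rdev)) {
            dev_err(&pdev->dev, "failed to register regulator\n");
            return PTR_ERR(priv->rdev);
        }

        platform_set_drvdata(pdev, priv);
        return 0;
    }

Because devm_regulator_register() ties the regulator's lifetime to the device, none of the probes above need a matching unregister in their remove paths.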
/linux-4.4.14/drivers/infiniband/hw/cxgb4/
H A Dresource.c38 static int c4iw_init_qid_table(struct c4iw_rdev *rdev) c4iw_init_qid_table() argument
42 if (c4iw_id_table_alloc(&rdev->resource.qid_table, c4iw_init_qid_table()
43 rdev->lldi.vr->qp.start, c4iw_init_qid_table()
44 rdev->lldi.vr->qp.size, c4iw_init_qid_table()
45 rdev->lldi.vr->qp.size, 0)) c4iw_init_qid_table()
48 for (i = rdev->lldi.vr->qp.start; c4iw_init_qid_table()
49 i < rdev->lldi.vr->qp.start + rdev->lldi.vr->qp.size; i++) c4iw_init_qid_table()
50 if (!(i & rdev->qpmask)) c4iw_init_qid_table()
51 c4iw_id_free(&rdev->resource.qid_table, i); c4iw_init_qid_table()
56 int c4iw_init_resource(struct c4iw_rdev *rdev, u32 nr_tpt, u32 nr_pdid) c4iw_init_resource() argument
59 err = c4iw_id_table_alloc(&rdev->resource.tpt_table, 0, nr_tpt, 1, c4iw_init_resource()
63 err = c4iw_init_qid_table(rdev); c4iw_init_resource()
66 err = c4iw_id_table_alloc(&rdev->resource.pdid_table, 0, c4iw_init_resource()
72 c4iw_id_table_free(&rdev->resource.qid_table); c4iw_init_resource()
74 c4iw_id_table_free(&rdev->resource.tpt_table); c4iw_init_resource()
97 u32 c4iw_get_cqid(struct c4iw_rdev *rdev, struct c4iw_dev_ucontext *uctx) c4iw_get_cqid() argument
111 qid = c4iw_get_resource(&rdev->resource.qid_table); c4iw_get_cqid()
114 mutex_lock(&rdev->stats.lock); c4iw_get_cqid()
115 rdev->stats.qid.cur += rdev->qpmask + 1; c4iw_get_cqid()
116 mutex_unlock(&rdev->stats.lock); c4iw_get_cqid()
117 for (i = qid+1; i & rdev->qpmask; i++) { c4iw_get_cqid()
134 for (i = qid+1; i & rdev->qpmask; i++) { c4iw_get_cqid()
145 mutex_lock(&rdev->stats.lock); c4iw_get_cqid()
146 if (rdev->stats.qid.cur > rdev->stats.qid.max) c4iw_get_cqid()
147 rdev->stats.qid.max = rdev->stats.qid.cur; c4iw_get_cqid()
148 mutex_unlock(&rdev->stats.lock); c4iw_get_cqid()
152 void c4iw_put_cqid(struct c4iw_rdev *rdev, u32 qid, c4iw_put_cqid() argument
167 u32 c4iw_get_qpid(struct c4iw_rdev *rdev, struct c4iw_dev_ucontext *uctx) c4iw_get_qpid() argument
181 qid = c4iw_get_resource(&rdev->resource.qid_table); c4iw_get_qpid()
183 mutex_lock(&rdev->stats.lock); c4iw_get_qpid()
184 rdev->stats.qid.fail++; c4iw_get_qpid()
185 mutex_unlock(&rdev->stats.lock); c4iw_get_qpid()
188 mutex_lock(&rdev->stats.lock); c4iw_get_qpid()
189 rdev->stats.qid.cur += rdev->qpmask + 1; c4iw_get_qpid()
190 mutex_unlock(&rdev->stats.lock); c4iw_get_qpid()
191 for (i = qid+1; i & rdev->qpmask; i++) { c4iw_get_qpid()
208 for (i = qid; i & rdev->qpmask; i++) { c4iw_get_qpid()
219 mutex_lock(&rdev->stats.lock); c4iw_get_qpid()
220 if (rdev->stats.qid.cur > rdev->stats.qid.max) c4iw_get_qpid()
221 rdev->stats.qid.max = rdev->stats.qid.cur; c4iw_get_qpid()
222 mutex_unlock(&rdev->stats.lock); c4iw_get_qpid()
226 void c4iw_put_qpid(struct c4iw_rdev *rdev, u32 qid, c4iw_put_qpid() argument
254 u32 c4iw_pblpool_alloc(struct c4iw_rdev *rdev, int size) c4iw_pblpool_alloc() argument
256 unsigned long addr = gen_pool_alloc(rdev->pbl_pool, size); c4iw_pblpool_alloc()
258 mutex_lock(&rdev->stats.lock); c4iw_pblpool_alloc()
260 rdev->stats.pbl.cur += roundup(size, 1 << MIN_PBL_SHIFT); c4iw_pblpool_alloc()
261 if (rdev->stats.pbl.cur > rdev->stats.pbl.max) c4iw_pblpool_alloc()
262 rdev->stats.pbl.max = rdev->stats.pbl.cur; c4iw_pblpool_alloc()
264 rdev->stats.pbl.fail++; c4iw_pblpool_alloc()
265 mutex_unlock(&rdev->stats.lock); c4iw_pblpool_alloc()
269 void c4iw_pblpool_free(struct c4iw_rdev *rdev, u32 addr, int size) c4iw_pblpool_free() argument
272 mutex_lock(&rdev->stats.lock); c4iw_pblpool_free()
273 rdev->stats.pbl.cur -= roundup(size, 1 << MIN_PBL_SHIFT); c4iw_pblpool_free()
274 mutex_unlock(&rdev->stats.lock); c4iw_pblpool_free()
275 gen_pool_free(rdev->pbl_pool, (unsigned long)addr, size); c4iw_pblpool_free()
278 int c4iw_pblpool_create(struct c4iw_rdev *rdev) c4iw_pblpool_create() argument
282 rdev->pbl_pool = gen_pool_create(MIN_PBL_SHIFT, -1); c4iw_pblpool_create()
283 if (!rdev->pbl_pool) c4iw_pblpool_create()
286 pbl_start = rdev->lldi.vr->pbl.start; c4iw_pblpool_create()
287 pbl_chunk = rdev->lldi.vr->pbl.size; c4iw_pblpool_create()
292 if (gen_pool_add(rdev->pbl_pool, pbl_start, pbl_chunk, -1)) { c4iw_pblpool_create()
313 void c4iw_pblpool_destroy(struct c4iw_rdev *rdev) c4iw_pblpool_destroy() argument
315 gen_pool_destroy(rdev->pbl_pool); c4iw_pblpool_destroy()
324 u32 c4iw_rqtpool_alloc(struct c4iw_rdev *rdev, int size) c4iw_rqtpool_alloc() argument
326 unsigned long addr = gen_pool_alloc(rdev->rqt_pool, size << 6); c4iw_rqtpool_alloc()
330 pci_name(rdev->lldi.pdev)); c4iw_rqtpool_alloc()
331 mutex_lock(&rdev->stats.lock); c4iw_rqtpool_alloc()
333 rdev->stats.rqt.cur += roundup(size << 6, 1 << MIN_RQT_SHIFT); c4iw_rqtpool_alloc()
334 if (rdev->stats.rqt.cur > rdev->stats.rqt.max) c4iw_rqtpool_alloc()
335 rdev->stats.rqt.max = rdev->stats.rqt.cur; c4iw_rqtpool_alloc()
337 rdev->stats.rqt.fail++; c4iw_rqtpool_alloc()
338 mutex_unlock(&rdev->stats.lock); c4iw_rqtpool_alloc()
342 void c4iw_rqtpool_free(struct c4iw_rdev *rdev, u32 addr, int size) c4iw_rqtpool_free() argument
345 mutex_lock(&rdev->stats.lock); c4iw_rqtpool_free()
346 rdev->stats.rqt.cur -= roundup(size << 6, 1 << MIN_RQT_SHIFT); c4iw_rqtpool_free()
347 mutex_unlock(&rdev->stats.lock); c4iw_rqtpool_free()
348 gen_pool_free(rdev->rqt_pool, (unsigned long)addr, size << 6); c4iw_rqtpool_free()
351 int c4iw_rqtpool_create(struct c4iw_rdev *rdev) c4iw_rqtpool_create() argument
355 rdev->rqt_pool = gen_pool_create(MIN_RQT_SHIFT, -1); c4iw_rqtpool_create()
356 if (!rdev->rqt_pool) c4iw_rqtpool_create()
359 rqt_start = rdev->lldi.vr->rq.start; c4iw_rqtpool_create()
360 rqt_chunk = rdev->lldi.vr->rq.size; c4iw_rqtpool_create()
365 if (gen_pool_add(rdev->rqt_pool, rqt_start, rqt_chunk, -1)) { c4iw_rqtpool_create()
384 void c4iw_rqtpool_destroy(struct c4iw_rdev *rdev) c4iw_rqtpool_destroy() argument
386 gen_pool_destroy(rdev->rqt_pool); c4iw_rqtpool_destroy()
394 u32 c4iw_ocqp_pool_alloc(struct c4iw_rdev *rdev, int size) c4iw_ocqp_pool_alloc() argument
396 unsigned long addr = gen_pool_alloc(rdev->ocqp_pool, size); c4iw_ocqp_pool_alloc()
399 mutex_lock(&rdev->stats.lock); c4iw_ocqp_pool_alloc()
400 rdev->stats.ocqp.cur += roundup(size, 1 << MIN_OCQP_SHIFT); c4iw_ocqp_pool_alloc()
401 if (rdev->stats.ocqp.cur > rdev->stats.ocqp.max) c4iw_ocqp_pool_alloc()
402 rdev->stats.ocqp.max = rdev->stats.ocqp.cur; c4iw_ocqp_pool_alloc()
403 mutex_unlock(&rdev->stats.lock); c4iw_ocqp_pool_alloc()
408 void c4iw_ocqp_pool_free(struct c4iw_rdev *rdev, u32 addr, int size) c4iw_ocqp_pool_free() argument
411 mutex_lock(&rdev->stats.lock); c4iw_ocqp_pool_free()
412 rdev->stats.ocqp.cur -= roundup(size, 1 << MIN_OCQP_SHIFT); c4iw_ocqp_pool_free()
413 mutex_unlock(&rdev->stats.lock); c4iw_ocqp_pool_free()
414 gen_pool_free(rdev->ocqp_pool, (unsigned long)addr, size); c4iw_ocqp_pool_free()
417 int c4iw_ocqp_pool_create(struct c4iw_rdev *rdev) c4iw_ocqp_pool_create() argument
421 rdev->ocqp_pool = gen_pool_create(MIN_OCQP_SHIFT, -1); c4iw_ocqp_pool_create()
422 if (!rdev->ocqp_pool) c4iw_ocqp_pool_create()
425 start = rdev->lldi.vr->ocq.start; c4iw_ocqp_pool_create()
426 chunk = rdev->lldi.vr->ocq.size; c4iw_ocqp_pool_create()
431 if (gen_pool_add(rdev->ocqp_pool, start, chunk, -1)) { c4iw_ocqp_pool_create()
450 void c4iw_ocqp_pool_destroy(struct c4iw_rdev *rdev) c4iw_ocqp_pool_destroy() argument
452 gen_pool_destroy(rdev->ocqp_pool); c4iw_ocqp_pool_destroy()
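resource.c carves the adapter's PBL, RQT, and on-chip queue windows into gen_pool allocators and tracks cur/max/fail counters under stats.lock. A generic sketch of that shape using only the genalloc API; struct demo_pool and its granule are invented, only the gen_pool_* calls match the excerpt:

    #include <linux/genalloc.h>
    #include <linux/kernel.h>
    #include <linux/mutex.h>

    #define DEMO_MIN_SHIFT 8    /* invented granule: 256-byte units */

    struct demo_pool {
        struct gen_pool *pool;
        struct mutex lock;
        u64 cur, max, fail;
    };

    static int demo_pool_create(struct demo_pool *p, unsigned long start,
                                unsigned long size)
    {
        p->pool = gen_pool_create(DEMO_MIN_SHIFT, -1);
        if (!p->pool)
            return -ENOMEM;

        if (gen_pool_add(p->pool, start, size, -1)) {
            gen_pool_destroy(p->pool);
            return -ENOMEM;
        }

        mutex_init(&p->lock);
        return 0;
    }

    static unsigned long demo_pool_alloc(struct demo_pool *p, size_t size)
    {
        unsigned long addr = gen_pool_alloc(p->pool, size);

        mutex_lock(&p->lock);
        if (addr) {
            p->cur += roundup(size, 1 << DEMO_MIN_SHIFT);
            if (p->cur > p->max)
                p->max = p->cur;    /* high-water mark */
        } else {
            p->fail++;
        }
        mutex_unlock(&p->lock);

        return addr;
    }

    static void demo_pool_free(struct demo_pool *p, unsigned long addr,
                               size_t size)
    {
        mutex_lock(&p->lock);
        p->cur -= roundup(size, 1 << DEMO_MIN_SHIFT);
        mutex_unlock(&p->lock);

        gen_pool_free(p->pool, addr, size);
    }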
H A Ddevice.c122 if (!wq->rdev->wr_log) c4iw_log_wr_stats()
125 idx = (atomic_inc_return(&wq->rdev->wr_log_idx) - 1) & c4iw_log_wr_stats()
126 (wq->rdev->wr_log_size - 1); c4iw_log_wr_stats()
127 le.poll_sge_ts = cxgb4_read_sge_timestamp(wq->rdev->lldi.ports[0]); c4iw_log_wr_stats()
144 wq->rdev->wr_log[idx] = le; c4iw_log_wr_stats()
155 #define ts2ns(ts) div64_u64((ts) * dev->rdev.lldi.cclk_ps, 1000) wr_log_show()
157 idx = atomic_read(&dev->rdev.wr_log_idx) & wr_log_show()
158 (dev->rdev.wr_log_size - 1); wr_log_show()
161 end = dev->rdev.wr_log_size - 1; wr_log_show()
162 lep = &dev->rdev.wr_log[idx]; wr_log_show()
194 if (idx > (dev->rdev.wr_log_size - 1)) wr_log_show()
196 lep = &dev->rdev.wr_log[idx]; wr_log_show()
213 if (dev->rdev.wr_log) wr_log_clear()
214 for (i = 0; i < dev->rdev.wr_log_size; i++) wr_log_clear()
215 dev->rdev.wr_log[i].valid = 0; wr_log_clear()
373 ret = cxgb4_read_tpte(stagd->devp->rdev.lldi.ports[0], (u32)id<<8, dump_stag()
376 dev_err(&stagd->devp->rdev.lldi.pdev->dev, dump_stag()
464 dev->rdev.stats.pd.total, dev->rdev.stats.pd.cur, stats_show()
465 dev->rdev.stats.pd.max, dev->rdev.stats.pd.fail); stats_show()
467 dev->rdev.stats.qid.total, dev->rdev.stats.qid.cur, stats_show()
468 dev->rdev.stats.qid.max, dev->rdev.stats.qid.fail); stats_show()
470 dev->rdev.stats.stag.total, dev->rdev.stats.stag.cur, stats_show()
471 dev->rdev.stats.stag.max, dev->rdev.stats.stag.fail); stats_show()
473 dev->rdev.stats.pbl.total, dev->rdev.stats.pbl.cur, stats_show()
474 dev->rdev.stats.pbl.max, dev->rdev.stats.pbl.fail); stats_show()
476 dev->rdev.stats.rqt.total, dev->rdev.stats.rqt.cur, stats_show()
477 dev->rdev.stats.rqt.max, dev->rdev.stats.rqt.fail); stats_show()
479 dev->rdev.stats.ocqp.total, dev->rdev.stats.ocqp.cur, stats_show()
480 dev->rdev.stats.ocqp.max, dev->rdev.stats.ocqp.fail); stats_show()
481 seq_printf(seq, " DB FULL: %10llu\n", dev->rdev.stats.db_full); stats_show()
482 seq_printf(seq, " DB EMPTY: %10llu\n", dev->rdev.stats.db_empty); stats_show()
483 seq_printf(seq, " DB DROP: %10llu\n", dev->rdev.stats.db_drop); stats_show()
486 dev->rdev.stats.db_state_transitions, stats_show()
487 dev->rdev.stats.db_fc_interruptions); stats_show()
488 seq_printf(seq, "TCAM_FULL: %10llu\n", dev->rdev.stats.tcam_full); stats_show()
490 dev->rdev.stats.act_ofld_conn_fails); stats_show()
492 dev->rdev.stats.pas_ofld_conn_fails); stats_show()
493 seq_printf(seq, "NEG_ADV_RCVD: %10llu\n", dev->rdev.stats.neg_adv); stats_show()
508 mutex_lock(&dev->rdev.stats.lock); stats_clear()
509 dev->rdev.stats.pd.max = 0; stats_clear()
510 dev->rdev.stats.pd.fail = 0; stats_clear()
511 dev->rdev.stats.qid.max = 0; stats_clear()
512 dev->rdev.stats.qid.fail = 0; stats_clear()
513 dev->rdev.stats.stag.max = 0; stats_clear()
514 dev->rdev.stats.stag.fail = 0; stats_clear()
515 dev->rdev.stats.pbl.max = 0; stats_clear()
516 dev->rdev.stats.pbl.fail = 0; stats_clear()
517 dev->rdev.stats.rqt.max = 0; stats_clear()
518 dev->rdev.stats.rqt.fail = 0; stats_clear()
519 dev->rdev.stats.ocqp.max = 0; stats_clear()
520 dev->rdev.stats.ocqp.fail = 0; stats_clear()
521 dev->rdev.stats.db_full = 0; stats_clear()
522 dev->rdev.stats.db_empty = 0; stats_clear()
523 dev->rdev.stats.db_drop = 0; stats_clear()
524 dev->rdev.stats.db_state_transitions = 0; stats_clear()
525 dev->rdev.stats.tcam_full = 0; stats_clear()
526 dev->rdev.stats.act_ofld_conn_fails = 0; stats_clear()
527 dev->rdev.stats.pas_ofld_conn_fails = 0; stats_clear()
528 mutex_unlock(&dev->rdev.stats.lock); stats_clear()
732 void c4iw_release_dev_ucontext(struct c4iw_rdev *rdev, c4iw_release_dev_ucontext() argument
742 if (!(entry->qid & rdev->qpmask)) { c4iw_release_dev_ucontext()
743 c4iw_put_resource(&rdev->resource.qid_table, c4iw_release_dev_ucontext()
745 mutex_lock(&rdev->stats.lock); c4iw_release_dev_ucontext()
746 rdev->stats.qid.cur -= rdev->qpmask + 1; c4iw_release_dev_ucontext()
747 mutex_unlock(&rdev->stats.lock); c4iw_release_dev_ucontext()
760 void c4iw_init_dev_ucontext(struct c4iw_rdev *rdev, c4iw_init_dev_ucontext() argument
769 static int c4iw_rdev_open(struct c4iw_rdev *rdev) c4iw_rdev_open() argument
773 c4iw_init_dev_ucontext(rdev, &rdev->uctx); c4iw_rdev_open()
780 if (rdev->lldi.udb_density != rdev->lldi.ucq_density) { c4iw_rdev_open()
782 pci_name(rdev->lldi.pdev), rdev->lldi.udb_density, c4iw_rdev_open()
783 rdev->lldi.ucq_density); c4iw_rdev_open()
787 if (rdev->lldi.vr->qp.start != rdev->lldi.vr->cq.start || c4iw_rdev_open()
788 rdev->lldi.vr->qp.size != rdev->lldi.vr->cq.size) { c4iw_rdev_open()
791 pci_name(rdev->lldi.pdev), rdev->lldi.vr->qp.start, c4iw_rdev_open()
792 rdev->lldi.vr->qp.size, rdev->lldi.vr->cq.size, c4iw_rdev_open()
793 rdev->lldi.vr->cq.size); c4iw_rdev_open()
798 rdev->qpmask = rdev->lldi.udb_density - 1; c4iw_rdev_open()
799 rdev->cqmask = rdev->lldi.ucq_density - 1; c4iw_rdev_open()
803 __func__, pci_name(rdev->lldi.pdev), rdev->lldi.vr->stag.start, c4iw_rdev_open()
804 rdev->lldi.vr->stag.size, c4iw_num_stags(rdev), c4iw_rdev_open()
805 rdev->lldi.vr->pbl.start, c4iw_rdev_open()
806 rdev->lldi.vr->pbl.size, rdev->lldi.vr->rq.start, c4iw_rdev_open()
807 rdev->lldi.vr->rq.size, c4iw_rdev_open()
808 rdev->lldi.vr->qp.start, c4iw_rdev_open()
809 rdev->lldi.vr->qp.size, c4iw_rdev_open()
810 rdev->lldi.vr->cq.start, c4iw_rdev_open()
811 rdev->lldi.vr->cq.size); c4iw_rdev_open()
814 (unsigned)pci_resource_len(rdev->lldi.pdev, 2), c4iw_rdev_open()
815 (void *)pci_resource_start(rdev->lldi.pdev, 2), c4iw_rdev_open()
816 rdev->lldi.db_reg, rdev->lldi.gts_reg, c4iw_rdev_open()
817 rdev->qpmask, rdev->cqmask); c4iw_rdev_open()
819 if (c4iw_num_stags(rdev) == 0) { c4iw_rdev_open()
824 rdev->stats.pd.total = T4_MAX_NUM_PD; c4iw_rdev_open()
825 rdev->stats.stag.total = rdev->lldi.vr->stag.size; c4iw_rdev_open()
826 rdev->stats.pbl.total = rdev->lldi.vr->pbl.size; c4iw_rdev_open()
827 rdev->stats.rqt.total = rdev->lldi.vr->rq.size; c4iw_rdev_open()
828 rdev->stats.ocqp.total = rdev->lldi.vr->ocq.size; c4iw_rdev_open()
829 rdev->stats.qid.total = rdev->lldi.vr->qp.size; c4iw_rdev_open()
831 err = c4iw_init_resource(rdev, c4iw_num_stags(rdev), T4_MAX_NUM_PD); c4iw_rdev_open()
836 err = c4iw_pblpool_create(rdev); c4iw_rdev_open()
841 err = c4iw_rqtpool_create(rdev); c4iw_rdev_open()
846 err = c4iw_ocqp_pool_create(rdev); c4iw_rdev_open()
851 rdev->status_page = (struct t4_dev_status_page *) c4iw_rdev_open()
853 if (!rdev->status_page) { c4iw_rdev_open()
859 rdev->wr_log = kzalloc((1 << c4iw_wr_log_size_order) * c4iw_rdev_open()
860 sizeof(*rdev->wr_log), GFP_KERNEL); c4iw_rdev_open()
861 if (rdev->wr_log) { c4iw_rdev_open()
862 rdev->wr_log_size = 1 << c4iw_wr_log_size_order; c4iw_rdev_open()
863 atomic_set(&rdev->wr_log_idx, 0); c4iw_rdev_open()
869 rdev->status_page->db_off = 0; c4iw_rdev_open()
873 c4iw_rqtpool_destroy(rdev); c4iw_rdev_open()
875 c4iw_pblpool_destroy(rdev); c4iw_rdev_open()
877 c4iw_destroy_resource(&rdev->resource); c4iw_rdev_open()
882 static void c4iw_rdev_close(struct c4iw_rdev *rdev) c4iw_rdev_close() argument
884 kfree(rdev->wr_log); c4iw_rdev_close()
885 free_page((unsigned long)rdev->status_page); c4iw_rdev_close()
886 c4iw_pblpool_destroy(rdev); c4iw_rdev_close()
887 c4iw_rqtpool_destroy(rdev); c4iw_rdev_close()
888 c4iw_destroy_resource(&rdev->resource); c4iw_rdev_close()
893 c4iw_rdev_close(&ctx->dev->rdev); c4iw_dealloc()
900 if (ctx->dev->rdev.bar2_kva) c4iw_dealloc()
901 iounmap(ctx->dev->rdev.bar2_kva); c4iw_dealloc()
902 if (ctx->dev->rdev.oc_mw_kva) c4iw_dealloc()
903 iounmap(ctx->dev->rdev.oc_mw_kva); c4iw_dealloc()
941 devp->rdev.lldi = *infop; c4iw_alloc()
945 __func__, devp->rdev.lldi.sge_ingpadboundary, c4iw_alloc()
946 devp->rdev.lldi.sge_egrstatuspagesize); c4iw_alloc()
948 devp->rdev.hw_queue.t4_eq_status_entries = c4iw_alloc()
949 devp->rdev.lldi.sge_ingpadboundary > 64 ? 2 : 1; c4iw_alloc()
950 devp->rdev.hw_queue.t4_max_eq_size = 65520; c4iw_alloc()
951 devp->rdev.hw_queue.t4_max_iq_size = 65520; c4iw_alloc()
952 devp->rdev.hw_queue.t4_max_rq_size = 8192 - c4iw_alloc()
953 devp->rdev.hw_queue.t4_eq_status_entries - 1; c4iw_alloc()
954 devp->rdev.hw_queue.t4_max_sq_size = c4iw_alloc()
955 devp->rdev.hw_queue.t4_max_eq_size - c4iw_alloc()
956 devp->rdev.hw_queue.t4_eq_status_entries - 1; c4iw_alloc()
957 devp->rdev.hw_queue.t4_max_qp_depth = c4iw_alloc()
958 devp->rdev.hw_queue.t4_max_rq_size; c4iw_alloc()
959 devp->rdev.hw_queue.t4_max_cq_depth = c4iw_alloc()
960 devp->rdev.hw_queue.t4_max_iq_size - 2; c4iw_alloc()
961 devp->rdev.hw_queue.t4_stat_len = c4iw_alloc()
962 devp->rdev.lldi.sge_egrstatuspagesize; c4iw_alloc()
969 devp->rdev.bar2_pa = pci_resource_start(devp->rdev.lldi.pdev, 2); c4iw_alloc()
970 if (!is_t4(devp->rdev.lldi.adapter_type)) { c4iw_alloc()
971 devp->rdev.bar2_kva = ioremap_wc(devp->rdev.bar2_pa, c4iw_alloc()
972 pci_resource_len(devp->rdev.lldi.pdev, 2)); c4iw_alloc()
973 if (!devp->rdev.bar2_kva) { c4iw_alloc()
979 devp->rdev.oc_mw_pa = c4iw_alloc()
980 pci_resource_start(devp->rdev.lldi.pdev, 2) + c4iw_alloc()
981 pci_resource_len(devp->rdev.lldi.pdev, 2) - c4iw_alloc()
982 roundup_pow_of_two(devp->rdev.lldi.vr->ocq.size); c4iw_alloc()
983 devp->rdev.oc_mw_kva = ioremap_wc(devp->rdev.oc_mw_pa, c4iw_alloc()
984 devp->rdev.lldi.vr->ocq.size); c4iw_alloc()
985 if (!devp->rdev.oc_mw_kva) { c4iw_alloc()
994 devp->rdev.lldi.vr->ocq.start, devp->rdev.lldi.vr->ocq.size, c4iw_alloc()
995 devp->rdev.oc_mw_pa, devp->rdev.oc_mw_kva); c4iw_alloc()
997 ret = c4iw_rdev_open(&devp->rdev); c4iw_alloc()
999 printk(KERN_ERR MOD "Unable to open CXIO rdev err %d\n", ret); c4iw_alloc()
1011 mutex_init(&devp->rdev.stats.lock); c4iw_alloc()
1014 devp->avail_ird = devp->rdev.lldi.max_ird_adapter; c4iw_alloc()
1018 pci_name(devp->rdev.lldi.pdev), c4iw_alloc()
1106 skb = copy_gl_to_skb_pkt(gl, rsp, dev->rdev.lldi.sge_pktshift); recv_rx_pkt()
1219 ctx->dev->rdev.flags |= T4_FATAL_ERROR; c4iw_uld_state_change()
1250 ctx->dev->rdev.stats.db_state_transitions++; stop_queues()
1252 if (ctx->dev->rdev.flags & T4_STATUS_PAGE_DISABLED) stop_queues()
1255 ctx->dev->rdev.status_page->db_off = 1; stop_queues()
1302 ctx->dev->rdev.stats.db_state_transitions++; resume_queues()
1303 if (ctx->dev->rdev.flags & T4_STATUS_PAGE_DISABLED) { resume_queues()
1307 ctx->dev->rdev.status_page->db_off = 0; resume_queues()
1311 if (cxgb4_dbfifo_count(ctx->dev->rdev.lldi.ports[0], 1) resume_queues()
1312 < (ctx->dev->rdev.lldi.dbfifo_int_thresh << resume_queues()
1330 ctx->dev->rdev.stats.db_fc_interruptions++; resume_queues()
1374 ret = cxgb4_sync_txq_pidx(qp->rhp->rdev.lldi.ports[0], recover_lost_dbs()
1389 ret = cxgb4_sync_txq_pidx(qp->rhp->rdev.lldi.ports[0], recover_lost_dbs()
1408 while (cxgb4_dbfifo_count(qp->rhp->rdev.lldi.ports[0], 1) > 0) { recover_lost_dbs()
1426 ret = cxgb4_flush_eq_cache(ctx->dev->rdev.lldi.ports[0]); recover_queues()
1473 ctx->dev->rdev.stats.db_full++; c4iw_uld_control()
1477 mutex_lock(&ctx->dev->rdev.stats.lock); c4iw_uld_control()
1478 ctx->dev->rdev.stats.db_empty++; c4iw_uld_control()
1479 mutex_unlock(&ctx->dev->rdev.stats.lock); c4iw_uld_control()
1483 mutex_lock(&ctx->dev->rdev.stats.lock); c4iw_uld_control()
1484 ctx->dev->rdev.stats.db_drop++; c4iw_uld_control()
1485 mutex_unlock(&ctx->dev->rdev.stats.lock); c4iw_uld_control()
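
Note on the c4iw_rdev_open()/c4iw_rdev_close() fragments above: each resource pool created in open is torn down in reverse order, both on the open error path and in close. A minimal sketch of that goto-unwind idiom, assuming hypothetical pool_*_create()/pool_*_destroy() helpers and a hypothetical struct my_rdev (none of these are the driver's real API):

static int rdev_open_sketch(struct my_rdev *rdev)
{
        int err;

        err = pool_a_create(rdev);              /* hypothetical pools */
        if (err)
                return err;
        err = pool_b_create(rdev);
        if (err)
                goto destroy_a;
        err = pool_c_create(rdev);
        if (err)
                goto destroy_b;
        return 0;

destroy_b:
        pool_b_destroy(rdev);
destroy_a:
        pool_a_destroy(rdev);
        return err;
}

static void rdev_close_sketch(struct my_rdev *rdev)
{
        /* teardown mirrors open, in reverse creation order */
        pool_c_destroy(rdev);
        pool_b_destroy(rdev);
        pool_a_destroy(rdev);
}
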
H A Dprovider.c103 c4iw_release_dev_ucontext(&rhp->rdev, &ucontext->uctx); c4iw_dealloc_ucontext()
125 c4iw_init_dev_ucontext(&rhp->rdev, &context->uctx); c4iw_alloc_ucontext()
132 rhp->rdev.flags |= T4_STATUS_PAGE_DISABLED; c4iw_alloc_ucontext()
153 mm->addr = virt_to_phys(rhp->rdev.status_page); c4iw_alloc_ucontext()
170 struct c4iw_rdev *rdev; c4iw_mmap() local
182 rdev = &(to_c4iw_dev(context->device)->rdev); c4iw_mmap()
191 if ((addr >= pci_resource_start(rdev->lldi.pdev, 0)) && c4iw_mmap()
192 (addr < (pci_resource_start(rdev->lldi.pdev, 0) + c4iw_mmap()
193 pci_resource_len(rdev->lldi.pdev, 0)))) { c4iw_mmap()
202 } else if ((addr >= pci_resource_start(rdev->lldi.pdev, 2)) && c4iw_mmap()
203 (addr < (pci_resource_start(rdev->lldi.pdev, 2) + c4iw_mmap()
204 pci_resource_len(rdev->lldi.pdev, 2)))) { c4iw_mmap()
209 if (addr >= rdev->oc_mw_pa) c4iw_mmap()
212 if (!is_t4(rdev->lldi.adapter_type)) c4iw_mmap()
243 c4iw_put_resource(&rhp->rdev.resource.pdid_table, php->pdid); c4iw_deallocate_pd()
244 mutex_lock(&rhp->rdev.stats.lock); c4iw_deallocate_pd()
245 rhp->rdev.stats.pd.cur--; c4iw_deallocate_pd()
246 mutex_unlock(&rhp->rdev.stats.lock); c4iw_deallocate_pd()
261 pdid = c4iw_get_resource(&rhp->rdev.resource.pdid_table); c4iw_allocate_pd()
266 c4iw_put_resource(&rhp->rdev.resource.pdid_table, pdid); c4iw_allocate_pd()
277 mutex_lock(&rhp->rdev.stats.lock); c4iw_allocate_pd()
278 rhp->rdev.stats.pd.cur++; c4iw_allocate_pd()
279 if (rhp->rdev.stats.pd.cur > rhp->rdev.stats.pd.max) c4iw_allocate_pd()
280 rhp->rdev.stats.pd.max = rhp->rdev.stats.pd.cur; c4iw_allocate_pd()
281 mutex_unlock(&rhp->rdev.stats.lock); c4iw_allocate_pd()
304 memcpy(&(gid->raw[0]), dev->rdev.lldi.ports[port-1]->dev_addr, 6); c4iw_query_gid()
321 memcpy(&props->sys_image_guid, dev->rdev.lldi.ports[0]->dev_addr, 6); c4iw_query_device()
322 props->hw_ver = CHELSIO_CHIP_RELEASE(dev->rdev.lldi.adapter_type); c4iw_query_device()
323 props->fw_ver = dev->rdev.lldi.fw_vers; c4iw_query_device()
326 props->vendor_id = (u32)dev->rdev.lldi.pdev->vendor; c4iw_query_device()
327 props->vendor_part_id = (u32)dev->rdev.lldi.pdev->device; c4iw_query_device()
329 props->max_qp = dev->rdev.lldi.vr->qp.size / 2; c4iw_query_device()
330 props->max_qp_wr = dev->rdev.hw_queue.t4_max_qp_depth; c4iw_query_device()
333 props->max_res_rd_atom = dev->rdev.lldi.max_ird_adapter; c4iw_query_device()
334 props->max_qp_rd_atom = min(dev->rdev.lldi.max_ordird_qp, c4iw_query_device()
337 props->max_cq = dev->rdev.lldi.vr->qp.size; c4iw_query_device()
338 props->max_cqe = dev->rdev.hw_queue.t4_max_cq_depth; c4iw_query_device()
339 props->max_mr = c4iw_num_stags(&dev->rdev); c4iw_query_device()
357 netdev = dev->rdev.lldi.ports[port-1]; c4iw_query_port()
408 CHELSIO_CHIP_RELEASE(c4iw_dev->rdev.lldi.adapter_type)); show_rev()
419 FW_HDR_FW_VER_MAJOR_G(c4iw_dev->rdev.lldi.fw_vers), show_fw_ver()
420 FW_HDR_FW_VER_MINOR_G(c4iw_dev->rdev.lldi.fw_vers), show_fw_ver()
421 FW_HDR_FW_VER_MICRO_G(c4iw_dev->rdev.lldi.fw_vers), show_fw_ver()
422 FW_HDR_FW_VER_BUILD_G(c4iw_dev->rdev.lldi.fw_vers)); show_fw_ver()
431 struct net_device *lldev = c4iw_dev->rdev.lldi.ports[0]; show_hca()
444 return sprintf(buf, "%x.%x\n", c4iw_dev->rdev.lldi.pdev->vendor, show_board()
445 c4iw_dev->rdev.lldi.pdev->device); show_board()
454 cxgb4_get_tcp_stats(c4iw_dev->rdev.lldi.pdev, &v4, &v6); c4iw_get_mib()
499 BUG_ON(!dev->rdev.lldi.ports[0]); c4iw_register_device()
502 memcpy(&dev->ibdev.node_guid, dev->rdev.lldi.ports[0]->dev_addr, 6); c4iw_register_device()
529 dev->ibdev.phys_port_cnt = dev->rdev.lldi.nports; c4iw_register_device()
530 dev->ibdev.num_comp_vectors = dev->rdev.lldi.nciq; c4iw_register_device()
531 dev->ibdev.dma_device = &(dev->rdev.lldi.pdev->dev); c4iw_register_device()
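
The c4iw_allocate_pd()/c4iw_deallocate_pd() fragments above keep a current/maximum pair per resource under rdev.stats.lock. Reduced to the bare high-watermark idiom (struct res_stat is a hypothetical simplification of the driver's stats fields, not its real layout):

struct res_stat {
        struct mutex lock;
        u64 cur;
        u64 max;
};

static void res_stat_inc(struct res_stat *s)
{
        mutex_lock(&s->lock);
        if (++s->cur > s->max)
                s->max = s->cur;        /* record the high-water mark */
        mutex_unlock(&s->lock);
}

static void res_stat_dec(struct res_stat *s)
{
        mutex_lock(&s->lock);
        s->cur--;
        mutex_unlock(&s->lock);
}
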
H A Dmem.c55 return (is_t4(dev->rdev.lldi.adapter_type) || mr_exceeds_hw_limits()
56 is_t5(dev->rdev.lldi.adapter_type)) && mr_exceeds_hw_limits()
60 static int _c4iw_write_mem_dma_aligned(struct c4iw_rdev *rdev, u32 addr, _c4iw_write_mem_dma_aligned() argument
100 ret = c4iw_ofld_send(rdev, skb); _c4iw_write_mem_dma_aligned()
104 ret = c4iw_wait_for_reply(rdev, &wr_wait, 0, 0, __func__); _c4iw_write_mem_dma_aligned()
108 static int _c4iw_write_mem_inline(struct c4iw_rdev *rdev, u32 addr, u32 len, _c4iw_write_mem_inline() argument
119 if (is_t4(rdev->lldi.adapter_type)) _c4iw_write_mem_inline()
173 ret = c4iw_ofld_send(rdev, skb); _c4iw_write_mem_inline()
179 ret = c4iw_wait_for_reply(rdev, &wr_wait, 0, 0, __func__); _c4iw_write_mem_inline()
183 static int _c4iw_write_mem_dma(struct c4iw_rdev *rdev, u32 addr, u32 len, void *data) _c4iw_write_mem_dma() argument
191 daddr = dma_map_single(&rdev->lldi.pdev->dev, data, len, DMA_TO_DEVICE); _c4iw_write_mem_dma()
192 if (dma_mapping_error(&rdev->lldi.pdev->dev, daddr)) _c4iw_write_mem_dma()
205 ret = _c4iw_write_mem_dma_aligned(rdev, addr, dmalen, daddr, _c4iw_write_mem_dma()
214 ret = _c4iw_write_mem_inline(rdev, addr, remain, data); _c4iw_write_mem_dma()
216 dma_unmap_single(&rdev->lldi.pdev->dev, save, len, DMA_TO_DEVICE); _c4iw_write_mem_dma()
224 static int write_adapter_mem(struct c4iw_rdev *rdev, u32 addr, u32 len, write_adapter_mem() argument
227 if (is_t5(rdev->lldi.adapter_type) && use_dsgl) { write_adapter_mem()
229 if (_c4iw_write_mem_dma(rdev, addr, len, data)) { write_adapter_mem()
233 pci_name(rdev->lldi.pdev)); write_adapter_mem()
234 return _c4iw_write_mem_inline(rdev, addr, len, write_adapter_mem()
239 return _c4iw_write_mem_inline(rdev, addr, len, data); write_adapter_mem()
241 return _c4iw_write_mem_inline(rdev, addr, len, data); write_adapter_mem()
250 static int write_tpt_entry(struct c4iw_rdev *rdev, u32 reset_tpt_entry, write_tpt_entry() argument
261 if (c4iw_fatal_error(rdev)) write_tpt_entry()
268 stag_idx = c4iw_get_resource(&rdev->resource.tpt_table); write_tpt_entry()
270 mutex_lock(&rdev->stats.lock); write_tpt_entry()
271 rdev->stats.stag.fail++; write_tpt_entry()
272 mutex_unlock(&rdev->stats.lock); write_tpt_entry()
275 mutex_lock(&rdev->stats.lock); write_tpt_entry()
276 rdev->stats.stag.cur += 32; write_tpt_entry()
277 if (rdev->stats.stag.cur > rdev->stats.stag.max) write_tpt_entry()
278 rdev->stats.stag.max = rdev->stats.stag.cur; write_tpt_entry()
279 mutex_unlock(&rdev->stats.lock); write_tpt_entry()
299 FW_RI_TPTE_PBLADDR_V(PBL_OFF(rdev, pbl_addr)>>3)); write_tpt_entry()
306 err = write_adapter_mem(rdev, stag_idx + write_tpt_entry()
307 (rdev->lldi.vr->stag.start >> 5), write_tpt_entry()
311 c4iw_put_resource(&rdev->resource.tpt_table, stag_idx); write_tpt_entry()
312 mutex_lock(&rdev->stats.lock); write_tpt_entry()
313 rdev->stats.stag.cur -= 32; write_tpt_entry()
314 mutex_unlock(&rdev->stats.lock); write_tpt_entry()
319 static int write_pbl(struct c4iw_rdev *rdev, __be64 *pbl, write_pbl() argument
325 __func__, pbl_addr, rdev->lldi.vr->pbl.start, write_pbl()
328 err = write_adapter_mem(rdev, pbl_addr >> 5, pbl_size << 3, pbl); write_pbl()
332 static int dereg_mem(struct c4iw_rdev *rdev, u32 stag, u32 pbl_size, dereg_mem() argument
335 return write_tpt_entry(rdev, 1, &stag, 0, 0, 0, 0, 0, 0, 0UL, 0, 0, dereg_mem()
339 static int allocate_window(struct c4iw_rdev *rdev, u32 * stag, u32 pdid) allocate_window() argument
342 return write_tpt_entry(rdev, 0, stag, 0, pdid, FW_RI_STAG_MW, 0, 0, 0, allocate_window()
346 static int deallocate_window(struct c4iw_rdev *rdev, u32 stag) deallocate_window() argument
348 return write_tpt_entry(rdev, 1, &stag, 0, 0, 0, 0, 0, 0, 0UL, 0, 0, 0, deallocate_window()
352 static int allocate_stag(struct c4iw_rdev *rdev, u32 *stag, u32 pdid, allocate_stag() argument
356 return write_tpt_entry(rdev, 0, stag, 0, pdid, FW_RI_STAG_NSMR, 0, 0, 0, allocate_stag()
378 ret = write_tpt_entry(&rhp->rdev, 0, &stag, 1, mhp->attr.pdid, register_mem()
390 dereg_mem(&rhp->rdev, mhp->attr.stag, mhp->attr.pbl_size, register_mem()
405 ret = write_tpt_entry(&rhp->rdev, 0, &stag, 1, mhp->attr.pdid, reregister_mem()
415 dereg_mem(&rhp->rdev, mhp->attr.stag, mhp->attr.pbl_size, reregister_mem()
423 mhp->attr.pbl_addr = c4iw_pblpool_alloc(&mhp->rhp->rdev, alloc_pbl()
625 ret = write_pbl(&mhp->rhp->rdev, page_list, mhp->attr.pbl_addr, c4iw_register_phys_mem()
647 c4iw_pblpool_free(&mhp->rhp->rdev, mhp->attr.pbl_addr, c4iw_register_phys_mem()
682 ret = write_tpt_entry(&rhp->rdev, 0, &stag, 1, php->pdid, c4iw_get_dma_mr()
693 dereg_mem(&rhp->rdev, mhp->attr.stag, mhp->attr.pbl_size, c4iw_get_dma_mr()
760 err = write_pbl(&mhp->rhp->rdev, c4iw_reg_user_mr()
772 err = write_pbl(&mhp->rhp->rdev, pages, c4iw_reg_user_mr()
794 c4iw_pblpool_free(&mhp->rhp->rdev, mhp->attr.pbl_addr, c4iw_reg_user_mr()
820 ret = allocate_window(&rhp->rdev, &stag, php->pdid); c4iw_alloc_mw()
832 deallocate_window(&rhp->rdev, mhp->attr.stag); c4iw_alloc_mw()
850 deallocate_window(&rhp->rdev, mhp->attr.stag); c4iw_dealloc_mw()
880 mhp->mpl = dma_alloc_coherent(&rhp->rdev.lldi.pdev->dev, c4iw_alloc_mr()
893 ret = allocate_stag(&rhp->rdev, &stag, php->pdid, c4iw_alloc_mr()
911 dereg_mem(&rhp->rdev, stag, mhp->attr.pbl_size, c4iw_alloc_mr()
914 c4iw_pblpool_free(&mhp->rhp->rdev, mhp->attr.pbl_addr, c4iw_alloc_mr()
917 dma_free_coherent(&mhp->rhp->rdev.lldi.pdev->dev, c4iw_alloc_mr()
964 dma_free_coherent(&mhp->rhp->rdev.lldi.pdev->dev, c4iw_dereg_mr()
966 dereg_mem(&rhp->rdev, mhp->attr.stag, mhp->attr.pbl_size, c4iw_dereg_mr()
969 c4iw_pblpool_free(&mhp->rhp->rdev, mhp->attr.pbl_addr, c4iw_dereg_mr()
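
write_adapter_mem() above prefers the DSGL DMA path on T5 parts and drops back to inline writes whenever the DMA attempt fails. The try-fast-path-then-fall-back shape in isolation (hw_supports_dsgl(), write_mem_dma() and write_mem_inline() are hypothetical names standing in for the checks and helpers in the fragments):

static int write_mem_sketch(struct my_rdev *rdev, u32 addr, u32 len,
                            void *data)
{
        if (hw_supports_dsgl(rdev)) {
                if (!write_mem_dma(rdev, addr, len, data))
                        return 0;
                /* DMA path failed: warn, then take the slow path */
                pr_warn("dsgl write failed, using inline writes\n");
        }
        return write_mem_inline(rdev, addr, len, data);
}
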
/linux-4.4.14/drivers/rapidio/switches/
H A Dtsi57x.c160 tsi57x_em_init(struct rio_dev *rdev) tsi57x_em_init() argument
165 pr_debug("TSI578 %s [%d:%d]\n", __func__, rdev->destid, rdev->hopcount); tsi57x_em_init()
168 portnum < RIO_GET_TOTAL_PORTS(rdev->swpinfo); portnum++) { tsi57x_em_init()
170 rio_read_config_32(rdev, tsi57x_em_init()
172 rio_write_config_32(rdev, tsi57x_em_init()
177 rio_read_config_32(rdev, tsi57x_em_init()
178 rdev->phys_efptr + tsi57x_em_init()
181 rio_write_config_32(rdev, tsi57x_em_init()
182 rdev->phys_efptr + tsi57x_em_init()
186 rio_read_config_32(rdev, tsi57x_em_init()
188 rio_write_config_32(rdev, tsi57x_em_init()
193 rio_read_config_32(rdev, tsi57x_em_init()
195 rio_write_config_32(rdev, tsi57x_em_init()
200 rio_read_config_32(rdev, tsi57x_em_init()
201 rdev->phys_efptr + RIO_PORT_N_CTL_CSR(portnum), tsi57x_em_init()
208 rio_write_config_32(rdev, tsi57x_em_init()
209 rdev->phys_efptr + RIO_PORT_LINKTO_CTL_CSR, 0x9a << 8); tsi57x_em_init()
215 tsi57x_em_handler(struct rio_dev *rdev, u8 portnum) tsi57x_em_handler() argument
217 struct rio_mport *mport = rdev->net->hport; tsi57x_em_handler()
223 rio_read_config_32(rdev, tsi57x_em_handler()
224 rdev->phys_efptr + RIO_PORT_N_ERR_STS_CSR(portnum), tsi57x_em_handler()
231 rio_read_config_32(rdev, tsi57x_em_handler()
232 rdev->phys_efptr + RIO_PORT_N_CTL_CSR(portnum), tsi57x_em_handler()
235 rio_write_config_32(rdev, tsi57x_em_handler()
236 rdev->phys_efptr + RIO_PORT_N_CTL_CSR(portnum), tsi57x_em_handler()
239 rio_write_config_32(rdev, tsi57x_em_handler()
240 rdev->phys_efptr + RIO_PORT_N_CTL_CSR(portnum), tsi57x_em_handler()
247 rio_read_config_32(rdev, tsi57x_em_handler()
248 rdev->phys_efptr + RIO_PORT_N_MNT_RSP_CSR(portnum), tsi57x_em_handler()
256 rio_write_config_32(rdev, tsi57x_em_handler()
261 rio_read_config_32(rdev, tsi57x_em_handler()
262 rdev->phys_efptr + tsi57x_em_handler()
275 rio_read_config_32(rdev, TSI578_SP_INT_STATUS(portnum), &intstat); tsi57x_em_handler()
277 rdev->destid, rdev->hopcount, portnum, intstat); tsi57x_em_handler()
280 rio_read_config_32(rdev, tsi57x_em_handler()
283 route_port = rdev->rswitch->route_table[regval]; tsi57x_em_handler()
285 rio_name(rdev), portnum, regval); tsi57x_em_handler()
286 tsi57x_route_add_entry(mport, rdev->destid, rdev->hopcount, tsi57x_em_handler()
290 rio_write_config_32(rdev, TSI578_SP_INT_STATUS(portnum), tsi57x_em_handler()
307 static int tsi57x_probe(struct rio_dev *rdev, const struct rio_device_id *id) tsi57x_probe() argument
309 pr_debug("RIO: %s for %s\n", __func__, rio_name(rdev)); tsi57x_probe()
311 spin_lock(&rdev->rswitch->lock); tsi57x_probe()
313 if (rdev->rswitch->ops) { tsi57x_probe()
314 spin_unlock(&rdev->rswitch->lock); tsi57x_probe()
317 rdev->rswitch->ops = &tsi57x_switch_ops; tsi57x_probe()
319 if (rdev->do_enum) { tsi57x_probe()
321 rio_write_config_32(rdev, RIO_STD_RTE_DEFAULT_PORT, tsi57x_probe()
325 spin_unlock(&rdev->rswitch->lock); tsi57x_probe()
329 static void tsi57x_remove(struct rio_dev *rdev) tsi57x_remove() argument
331 pr_debug("RIO: %s for %s\n", __func__, rio_name(rdev)); tsi57x_remove()
332 spin_lock(&rdev->rswitch->lock); tsi57x_remove()
333 if (rdev->rswitch->ops != &tsi57x_switch_ops) { tsi57x_remove()
334 spin_unlock(&rdev->rswitch->lock); tsi57x_remove()
337 rdev->rswitch->ops = NULL; tsi57x_remove()
338 spin_unlock(&rdev->rswitch->lock); tsi57x_remove()
H A Didt_gen2.c217 idtg2_em_init(struct rio_dev *rdev) idtg2_em_init() argument
227 pr_debug("RIO: %s [%d:%d]\n", __func__, rdev->destid, rdev->hopcount); idtg2_em_init()
230 rio_write_config_32(rdev, IDT_PW_INFO_CSR, 0x0000e000); idtg2_em_init()
237 rio_write_config_32(rdev, IDT_LT_ERR_REPORT_EN, idtg2_em_init()
244 rio_read_config_32(rdev, IDT_DEV_CTRL_1, &regval); idtg2_em_init()
245 rio_write_config_32(rdev, IDT_DEV_CTRL_1, idtg2_em_init()
253 rio_write_config_32(rdev, IDT_PORT_ERR_REPORT_EN_BC, 0x807e8037); idtg2_em_init()
256 rio_write_config_32(rdev, IDT_PORT_ISERR_REPORT_EN_BC, idtg2_em_init()
260 tmp = RIO_GET_TOTAL_PORTS(rdev->swpinfo); idtg2_em_init()
262 rio_read_config_32(rdev, IDT_PORT_OPS(i), &regval); idtg2_em_init()
263 rio_write_config_32(rdev, idtg2_em_init()
270 rio_write_config_32(rdev, IDT_ERR_CAP, IDT_ERR_CAP_LOG_OVERWR); idtg2_em_init()
277 rio_write_config_32(rdev, IDT_LANE_ERR_REPORT_EN_BC, 0); idtg2_em_init()
282 tmp = (rdev->did == RIO_DID_IDTCPS1848) ? 48 : 16; idtg2_em_init()
284 rio_read_config_32(rdev, IDT_LANE_CTRL(i), &regval); idtg2_em_init()
285 rio_write_config_32(rdev, IDT_LANE_CTRL(i), idtg2_em_init()
294 rio_write_config_32(rdev, IDT_AUX_PORT_ERR_CAP_EN, 0); idtg2_em_init()
297 rio_write_config_32(rdev, IDT_AUX_ERR_REPORT_EN, 0); idtg2_em_init()
300 rio_write_config_32(rdev, IDT_JTAG_CTRL, 0); idtg2_em_init()
303 rio_read_config_32(rdev, IDT_I2C_MCTRL, &regval); idtg2_em_init()
304 rio_write_config_32(rdev, IDT_I2C_MCTRL, regval & ~IDT_I2C_MCTRL_GENPW); idtg2_em_init()
311 rio_write_config_32(rdev, IDT_CFGBLK_ERR_CAPTURE_EN, 0); idtg2_em_init()
314 rio_read_config_32(rdev, IDT_CFGBLK_ERR_REPORT, &regval); idtg2_em_init()
315 rio_write_config_32(rdev, IDT_CFGBLK_ERR_REPORT, idtg2_em_init()
319 rio_write_config_32(rdev, idtg2_em_init()
320 rdev->phys_efptr + RIO_PORT_LINKTO_CTL_CSR, 0x8e << 8); idtg2_em_init()
326 idtg2_em_handler(struct rio_dev *rdev, u8 portnum) idtg2_em_handler() argument
330 rio_read_config_32(rdev, idtg2_em_handler()
331 rdev->em_efptr + RIO_EM_LTL_ERR_DETECT, &em_ltlerrdet); idtg2_em_handler()
336 rio_read_config_32(rdev, idtg2_em_handler()
341 rio_name(rdev), em_ltlerrdet, regval); idtg2_em_handler()
344 rio_write_config_32(rdev, IDT_ISLTL_ADDRESS_CAP, 0); idtg2_em_handler()
349 rio_read_config_32(rdev, idtg2_em_handler()
350 rdev->em_efptr + RIO_EM_PN_ERR_DETECT(portnum), &em_perrdet); idtg2_em_handler()
357 rio_read_config_32(rdev, idtg2_em_handler()
361 " errors 0x%x\n", rio_name(rdev), regval); idtg2_em_handler()
364 rio_write_config_32(rdev, idtg2_em_handler()
375 struct rio_dev *rdev = to_rio_dev(dev); idtg2_show_errlog() local
379 while (!rio_read_config_32(rdev, IDT_ERR_RD, &regval)) { idtg2_show_errlog()
393 static int idtg2_sysfs(struct rio_dev *rdev, bool create) idtg2_sysfs() argument
395 struct device *dev = &rdev->dev; idtg2_sysfs()
420 static int idtg2_probe(struct rio_dev *rdev, const struct rio_device_id *id) idtg2_probe() argument
422 pr_debug("RIO: %s for %s\n", __func__, rio_name(rdev)); idtg2_probe()
424 spin_lock(&rdev->rswitch->lock); idtg2_probe()
426 if (rdev->rswitch->ops) { idtg2_probe()
427 spin_unlock(&rdev->rswitch->lock); idtg2_probe()
431 rdev->rswitch->ops = &idtg2_switch_ops; idtg2_probe()
433 if (rdev->do_enum) { idtg2_probe()
435 rio_write_config_32(rdev, idtg2_probe()
440 idtg2_sysfs(rdev, true); idtg2_probe()
442 spin_unlock(&rdev->rswitch->lock); idtg2_probe()
446 static void idtg2_remove(struct rio_dev *rdev) idtg2_remove() argument
448 pr_debug("RIO: %s for %s\n", __func__, rio_name(rdev)); idtg2_remove()
449 spin_lock(&rdev->rswitch->lock); idtg2_remove()
450 if (rdev->rswitch->ops != &idtg2_switch_ops) { idtg2_remove()
451 spin_unlock(&rdev->rswitch->lock); idtg2_remove()
454 rdev->rswitch->ops = NULL; idtg2_remove()
457 idtg2_sysfs(rdev, false); idtg2_remove()
459 spin_unlock(&rdev->rswitch->lock); idtg2_remove()
H A Dtsi568.c115 tsi568_em_init(struct rio_dev *rdev) tsi568_em_init() argument
120 pr_debug("TSI568 %s [%d:%d]\n", __func__, rdev->destid, rdev->hopcount); tsi568_em_init()
124 portnum < RIO_GET_TOTAL_PORTS(rdev->swpinfo); portnum++) { tsi568_em_init()
125 rio_read_config_32(rdev, TSI568_SP_MODE(portnum), &regval); tsi568_em_init()
126 rio_write_config_32(rdev, TSI568_SP_MODE(portnum), tsi568_em_init()
144 static int tsi568_probe(struct rio_dev *rdev, const struct rio_device_id *id) tsi568_probe() argument
146 pr_debug("RIO: %s for %s\n", __func__, rio_name(rdev)); tsi568_probe()
148 spin_lock(&rdev->rswitch->lock); tsi568_probe()
150 if (rdev->rswitch->ops) { tsi568_probe()
151 spin_unlock(&rdev->rswitch->lock); tsi568_probe()
155 rdev->rswitch->ops = &tsi568_switch_ops; tsi568_probe()
156 spin_unlock(&rdev->rswitch->lock); tsi568_probe()
160 static void tsi568_remove(struct rio_dev *rdev) tsi568_remove() argument
162 pr_debug("RIO: %s for %s\n", __func__, rio_name(rdev)); tsi568_remove()
163 spin_lock(&rdev->rswitch->lock); tsi568_remove()
164 if (rdev->rswitch->ops != &tsi568_switch_ops) { tsi568_remove()
165 spin_unlock(&rdev->rswitch->lock); tsi568_remove()
168 rdev->rswitch->ops = NULL; tsi568_remove()
169 spin_unlock(&rdev->rswitch->lock); tsi568_remove()
H A Didtcps.c133 static int idtcps_probe(struct rio_dev *rdev, const struct rio_device_id *id) idtcps_probe() argument
135 pr_debug("RIO: %s for %s\n", __func__, rio_name(rdev)); idtcps_probe()
137 spin_lock(&rdev->rswitch->lock); idtcps_probe()
139 if (rdev->rswitch->ops) { idtcps_probe()
140 spin_unlock(&rdev->rswitch->lock); idtcps_probe()
144 rdev->rswitch->ops = &idtcps_switch_ops; idtcps_probe()
146 if (rdev->do_enum) { idtcps_probe()
148 rio_write_config_32(rdev, idtcps_probe()
149 rdev->phys_efptr + RIO_PORT_LINKTO_CTL_CSR, 0x8e << 8); idtcps_probe()
151 rio_write_config_32(rdev, idtcps_probe()
155 spin_unlock(&rdev->rswitch->lock); idtcps_probe()
159 static void idtcps_remove(struct rio_dev *rdev) idtcps_remove() argument
161 pr_debug("RIO: %s for %s\n", __func__, rio_name(rdev)); idtcps_remove()
162 spin_lock(&rdev->rswitch->lock); idtcps_remove()
163 if (rdev->rswitch->ops != &idtcps_switch_ops) { idtcps_remove()
164 spin_unlock(&rdev->rswitch->lock); idtcps_remove()
167 rdev->rswitch->ops = NULL; idtcps_remove()
168 spin_unlock(&rdev->rswitch->lock); idtcps_remove()
/linux-4.4.14/sound/core/seq/
H A Dseq_virmidi.c79 static int snd_virmidi_dev_receive_event(struct snd_virmidi_dev *rdev, snd_virmidi_dev_receive_event() argument
86 read_lock(&rdev->filelist_lock); snd_virmidi_dev_receive_event()
87 list_for_each_entry(vmidi, &rdev->filelist, list) { snd_virmidi_dev_receive_event()
100 read_unlock(&rdev->filelist_lock); snd_virmidi_dev_receive_event()
115 struct snd_virmidi_dev *rdev;
117 rdev = rmidi->private_data;
118 return snd_virmidi_dev_receive_event(rdev, ev);
128 struct snd_virmidi_dev *rdev; snd_virmidi_event_input() local
130 rdev = private_data; snd_virmidi_event_input()
131 if (!(rdev->flags & SNDRV_VIRMIDI_USE)) snd_virmidi_event_input()
133 return snd_virmidi_dev_receive_event(rdev, ev); snd_virmidi_event_input()
163 !(vmidi->rdev->flags & SNDRV_VIRMIDI_SUBSCRIBE)) { snd_virmidi_output_trigger()
209 struct snd_virmidi_dev *rdev = substream->rmidi->private_data; snd_virmidi_input_open() local
222 vmidi->seq_mode = rdev->seq_mode; snd_virmidi_input_open()
223 vmidi->client = rdev->client; snd_virmidi_input_open()
224 vmidi->port = rdev->port; snd_virmidi_input_open()
226 write_lock_irqsave(&rdev->filelist_lock, flags); snd_virmidi_input_open()
227 list_add_tail(&vmidi->list, &rdev->filelist); snd_virmidi_input_open()
228 write_unlock_irqrestore(&rdev->filelist_lock, flags); snd_virmidi_input_open()
229 vmidi->rdev = rdev; snd_virmidi_input_open()
238 struct snd_virmidi_dev *rdev = substream->rmidi->private_data; snd_virmidi_output_open() local
250 vmidi->seq_mode = rdev->seq_mode; snd_virmidi_output_open()
251 vmidi->client = rdev->client; snd_virmidi_output_open()
252 vmidi->port = rdev->port; snd_virmidi_output_open()
254 vmidi->rdev = rdev; snd_virmidi_output_open()
264 struct snd_virmidi_dev *rdev = substream->rmidi->private_data; snd_virmidi_input_close() local
267 write_lock_irq(&rdev->filelist_lock); snd_virmidi_input_close()
269 write_unlock_irq(&rdev->filelist_lock); snd_virmidi_input_close()
294 struct snd_virmidi_dev *rdev; snd_virmidi_subscribe() local
296 rdev = private_data; snd_virmidi_subscribe()
297 if (!try_module_get(rdev->card->module)) snd_virmidi_subscribe()
299 rdev->flags |= SNDRV_VIRMIDI_SUBSCRIBE; snd_virmidi_subscribe()
309 struct snd_virmidi_dev *rdev; snd_virmidi_unsubscribe() local
311 rdev = private_data; snd_virmidi_unsubscribe()
312 rdev->flags &= ~SNDRV_VIRMIDI_SUBSCRIBE; snd_virmidi_unsubscribe()
313 module_put(rdev->card->module); snd_virmidi_unsubscribe()
324 struct snd_virmidi_dev *rdev; snd_virmidi_use() local
326 rdev = private_data; snd_virmidi_use()
327 if (!try_module_get(rdev->card->module)) snd_virmidi_use()
329 rdev->flags |= SNDRV_VIRMIDI_USE; snd_virmidi_use()
339 struct snd_virmidi_dev *rdev; snd_virmidi_unuse() local
341 rdev = private_data; snd_virmidi_unuse()
342 rdev->flags &= ~SNDRV_VIRMIDI_USE; snd_virmidi_unuse()
343 module_put(rdev->card->module); snd_virmidi_unuse()
367 static int snd_virmidi_dev_attach_seq(struct snd_virmidi_dev *rdev) snd_virmidi_dev_attach_seq() argument
374 if (rdev->client >= 0) snd_virmidi_dev_attach_seq()
383 client = snd_seq_create_kernel_client(rdev->card, rdev->device, snd_virmidi_dev_attach_seq()
384 "%s %d-%d", rdev->rmidi->name, snd_virmidi_dev_attach_seq()
385 rdev->card->number, snd_virmidi_dev_attach_seq()
386 rdev->device); snd_virmidi_dev_attach_seq()
391 rdev->client = client; snd_virmidi_dev_attach_seq()
395 sprintf(pinfo->name, "VirMIDI %d-%d", rdev->card->number, rdev->device); snd_virmidi_dev_attach_seq()
406 pcallbacks.private_data = rdev; snd_virmidi_dev_attach_seq()
416 rdev->client = -1; snd_virmidi_dev_attach_seq()
420 rdev->port = pinfo->addr.port; snd_virmidi_dev_attach_seq()
432 static void snd_virmidi_dev_detach_seq(struct snd_virmidi_dev *rdev) snd_virmidi_dev_detach_seq() argument
434 if (rdev->client >= 0) { snd_virmidi_dev_detach_seq()
435 snd_seq_delete_kernel_client(rdev->client); snd_virmidi_dev_detach_seq()
436 rdev->client = -1; snd_virmidi_dev_detach_seq()
445 struct snd_virmidi_dev *rdev = rmidi->private_data; snd_virmidi_dev_register() local
448 switch (rdev->seq_mode) { snd_virmidi_dev_register()
450 err = snd_virmidi_dev_attach_seq(rdev); snd_virmidi_dev_register()
455 if (rdev->client == 0) snd_virmidi_dev_register()
460 pr_err("ALSA: seq_virmidi: seq_mode is not set: %d\n", rdev->seq_mode); snd_virmidi_dev_register()
472 struct snd_virmidi_dev *rdev = rmidi->private_data; snd_virmidi_dev_unregister() local
474 if (rdev->seq_mode == SNDRV_VIRMIDI_SEQ_DISPATCH) snd_virmidi_dev_unregister()
475 snd_virmidi_dev_detach_seq(rdev); snd_virmidi_dev_unregister()
492 struct snd_virmidi_dev *rdev = rmidi->private_data; snd_virmidi_free() local
493 kfree(rdev); snd_virmidi_free()
504 struct snd_virmidi_dev *rdev; snd_virmidi_new() local
514 rdev = kzalloc(sizeof(*rdev), GFP_KERNEL); snd_virmidi_new()
515 if (rdev == NULL) { snd_virmidi_new()
519 rdev->card = card; snd_virmidi_new()
520 rdev->rmidi = rmidi; snd_virmidi_new()
521 rdev->device = device; snd_virmidi_new()
522 rdev->client = -1; snd_virmidi_new()
523 rwlock_init(&rdev->filelist_lock); snd_virmidi_new()
524 INIT_LIST_HEAD(&rdev->filelist); snd_virmidi_new()
525 rdev->seq_mode = SNDRV_VIRMIDI_SEQ_DISPATCH; snd_virmidi_new()
526 rmidi->private_data = rdev; snd_virmidi_new()
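
seq_virmidi.c above guards rdev->filelist with a reader/writer lock: event delivery walks the list under read_lock(), while open and close mutate it under the write-side variants. The read-side traversal, sketched (handle_event() is a hypothetical stand-in for the per-substream delivery done in snd_virmidi_dev_receive_event()):

static void deliver_sketch(struct snd_virmidi_dev *rdev,
                           struct snd_seq_event *ev)
{
        struct snd_virmidi *vmidi;

        read_lock(&rdev->filelist_lock);
        list_for_each_entry(vmidi, &rdev->filelist, list)
                handle_event(vmidi, ev);        /* hypothetical consumer */
        read_unlock(&rdev->filelist_lock);
}
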
/linux-4.4.14/drivers/rapidio/
H A Drio-driver.c23 * @rdev: the RIO device structure to match against
31 const struct rio_dev *rdev) rio_match_device()
34 if (((id->vid == RIO_ANY_ID) || (id->vid == rdev->vid)) && rio_match_device()
35 ((id->did == RIO_ANY_ID) || (id->did == rdev->did)) && rio_match_device()
37 || (id->asm_vid == rdev->asm_vid)) rio_match_device()
39 || (id->asm_did == rdev->asm_did))) rio_match_device()
49 * @rdev: RIO device being referenced
57 struct rio_dev *rio_dev_get(struct rio_dev *rdev) rio_dev_get() argument
59 if (rdev) rio_dev_get()
60 get_device(&rdev->dev); rio_dev_get()
62 return rdev; rio_dev_get()
68 * @rdev: RIO device being disconnected
74 void rio_dev_put(struct rio_dev *rdev) rio_dev_put() argument
76 if (rdev) rio_dev_put()
77 put_device(&rdev->dev); rio_dev_put()
89 struct rio_dev *rdev = to_rio_dev(dev); rio_device_probe() local
93 if (!rdev->driver && rdrv->probe) { rio_device_probe()
96 id = rio_match_device(rdrv->id_table, rdev); rio_device_probe()
97 rio_dev_get(rdev); rio_device_probe()
99 error = rdrv->probe(rdev, id); rio_device_probe()
101 rdev->driver = rdrv; rio_device_probe()
104 rio_dev_put(rdev); rio_device_probe()
120 struct rio_dev *rdev = to_rio_dev(dev); rio_device_remove() local
121 struct rio_driver *rdrv = rdev->driver; rio_device_remove()
125 rdrv->remove(rdev); rio_device_remove()
126 rdev->driver = NULL; rio_device_remove()
129 rio_dev_put(rdev); rio_device_remove()
167 void rio_attach_device(struct rio_dev *rdev) rio_attach_device() argument
169 rdev->dev.bus = &rio_bus_type; rio_attach_device()
185 struct rio_dev *rdev = to_rio_dev(dev); rio_match_bus() local
193 found_id = rio_match_device(id, rdev); rio_match_bus()
203 struct rio_dev *rdev; rio_uevent() local
208 rdev = to_rio_dev(dev); rio_uevent()
209 if (!rdev) rio_uevent()
213 rdev->vid, rdev->did, rdev->asm_vid, rdev->asm_did)) rio_uevent()
29 rio_match_device(const struct rio_device_id *id, const struct rio_dev *rdev) rio_match_device() argument
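
rio_dev_get()/rio_dev_put() above are NULL-tolerant wrappers over get_device()/put_device(). A caller keeping an rdev pointer beyond the context that produced it brackets the use with a reference, roughly like this (do_work_with() is a hypothetical placeholder):

        struct rio_dev *rdev = rio_dev_get(candidate);  /* may be NULL */

        if (rdev) {
                do_work_with(rdev);     /* safe while the reference is held */
                rio_dev_put(rdev);      /* drop the reference when done */
        }
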
H A Drio-scan.c40 static void rio_init_em(struct rio_dev *rdev);
213 struct rio_dev *rdev; rio_clear_locks() local
227 list_for_each_entry(rdev, &net->devices, net_list) { rio_clear_locks()
228 rio_write_config_32(rdev, RIO_HOST_DID_LOCK_CSR, rio_clear_locks()
230 rio_read_config_32(rdev, RIO_HOST_DID_LOCK_CSR, &result); rio_clear_locks()
234 rdev->vid, rdev->did); rio_clear_locks()
239 rio_read_config_32(rdev, rio_clear_locks()
240 rdev->phys_efptr + RIO_PORT_GEN_CTL_CSR, rio_clear_locks()
243 rio_write_config_32(rdev, rio_clear_locks()
244 rdev->phys_efptr + RIO_PORT_GEN_CTL_CSR, rio_clear_locks()
303 struct rio_dev *rdev; rio_release_dev() local
305 rdev = to_rio_dev(dev); rio_release_dev()
306 kfree(rdev); rio_release_dev()
311 * @rdev: RIO device
318 static int rio_is_switch(struct rio_dev *rdev) rio_is_switch() argument
320 if (rdev->pef & RIO_PEF_SWITCH) rio_is_switch()
346 struct rio_dev *rdev; rio_setup_device() local
366 rdev = kzalloc(size, GFP_KERNEL); rio_setup_device()
367 if (!rdev) rio_setup_device()
370 rdev->net = net; rio_setup_device()
371 rdev->pef = result; rio_setup_device()
372 rdev->swpinfo = swpinfo; rio_setup_device()
375 rdev->did = result >> 16; rio_setup_device()
376 rdev->vid = result & 0xffff; rio_setup_device()
378 &rdev->device_rev); rio_setup_device()
381 rdev->asm_did = result >> 16; rio_setup_device()
382 rdev->asm_vid = result & 0xffff; rio_setup_device()
385 rdev->asm_rev = result >> 16; rio_setup_device()
386 if (rdev->pef & RIO_PEF_EXT_FEATURES) { rio_setup_device()
387 rdev->efptr = result & 0xffff; rio_setup_device()
388 rdev->phys_efptr = rio_mport_get_physefb(port, 0, destid, rio_setup_device()
391 rdev->em_efptr = rio_mport_get_feature(port, 0, destid, rio_setup_device()
396 &rdev->src_ops); rio_setup_device()
398 &rdev->dst_ops); rio_setup_device()
408 rdev->comp_tag = next_comptag++; rio_setup_device()
409 rdev->do_enum = true; rio_setup_device()
413 &rdev->comp_tag); rio_setup_device()
416 if (rio_device_has_destid(port, rdev->src_ops, rdev->dst_ops)) { rio_setup_device()
419 rdev->destid = next_destid; rio_setup_device()
422 rdev->destid = rio_get_device_id(port, destid, hopcount); rio_setup_device()
424 rdev->hopcount = 0xff; rio_setup_device()
429 rdev->destid = destid; rio_setup_device()
430 rdev->hopcount = hopcount; rio_setup_device()
434 if (rio_is_switch(rdev)) { rio_setup_device()
435 rswitch = rdev->rswitch; rio_setup_device()
447 dev_set_name(&rdev->dev, "%02x:s:%04x", rdev->net->id, rio_setup_device()
448 rdev->comp_tag & RIO_CTAG_UDEVID); rio_setup_device()
451 rio_route_clr_table(rdev, RIO_GLOBAL_TABLE, 0); rio_setup_device()
460 dev_set_name(&rdev->dev, "%02x:e:%04x", rdev->net->id, rio_setup_device()
461 rdev->comp_tag & RIO_CTAG_UDEVID); rio_setup_device()
464 rdev->dev.parent = &port->dev; rio_setup_device()
465 rio_attach_device(rdev); rio_setup_device()
467 device_initialize(&rdev->dev); rio_setup_device()
468 rdev->dev.release = rio_release_dev; rio_setup_device()
469 rio_dev_get(rdev); rio_setup_device()
471 rdev->dma_mask = DMA_BIT_MASK(32); rio_setup_device()
472 rdev->dev.dma_mask = &rdev->dma_mask; rio_setup_device()
473 rdev->dev.coherent_dma_mask = DMA_BIT_MASK(32); rio_setup_device()
475 if (rdev->dst_ops & RIO_DST_OPS_DOORBELL) rio_setup_device()
476 rio_init_dbell_res(&rdev->riores[RIO_DOORBELL_RESOURCE], rio_setup_device()
479 ret = rio_add_device(rdev); rio_setup_device()
483 return rdev; rio_setup_device()
489 kfree(rdev); rio_setup_device()
567 struct rio_dev *rdev; rio_enum_peer() local
587 rdev = rio_get_comptag((regval & 0xffff), NULL); rio_enum_peer()
589 if (rdev && prev && rio_is_switch(prev)) { rio_enum_peer()
591 rio_name(rdev)); rio_enum_peer()
592 prev->rswitch->nextdev[prev_port] = rdev; rio_enum_peer()
621 rdev = rio_setup_device(net, port, RIO_ANY_DESTID(port->sys_size), rio_enum_peer()
623 if (rdev) { rio_enum_peer()
625 list_add_tail(&rdev->net_list, &net->devices); rio_enum_peer()
626 rdev->prev = prev; rio_enum_peer()
628 prev->rswitch->nextdev[prev_port] = rdev; rio_enum_peer()
632 if (rio_is_switch(rdev)) { rio_enum_peer()
639 sw_inport = RIO_GET_PORT_NUM(rdev->swpinfo); rio_enum_peer()
640 rio_route_add_entry(rdev, RIO_GLOBAL_TABLE, rio_enum_peer()
642 rdev->rswitch->route_table[port->host_deviceid] = sw_inport; rio_enum_peer()
647 rio_route_add_entry(rdev, RIO_GLOBAL_TABLE, rio_enum_peer()
649 rdev->rswitch->route_table[destid] = sw_inport; rio_enum_peer()
655 rio_name(rdev), rdev->vid, rdev->did, rio_enum_peer()
656 RIO_GET_TOTAL_PORTS(rdev->swpinfo)); rio_enum_peer()
659 port_num < RIO_GET_TOTAL_PORTS(rdev->swpinfo); rio_enum_peer()
665 rdev->rswitch->port_ok |= (1 << port_num); rio_enum_peer()
680 rdev->rswitch->port_ok |= (1 << port_num); rio_enum_peer()
681 rio_route_add_entry(rdev, RIO_GLOBAL_TABLE, rio_enum_peer()
686 rdev, port_num) < 0) rio_enum_peer()
695 rio_route_add_entry(rdev, rio_enum_peer()
700 rdev->rswitch-> rio_enum_peer()
712 if (rdev->em_efptr) rio_enum_peer()
713 rio_set_port_lockout(rdev, port_num, 1); rio_enum_peer()
715 rdev->rswitch->port_ok &= ~(1 << port_num); rio_enum_peer()
720 if ((rdev->src_ops & RIO_SRC_OPS_PORT_WRITE) && rio_enum_peer()
721 (rdev->em_efptr)) { rio_enum_peer()
722 rio_write_config_32(rdev, rio_enum_peer()
723 rdev->em_efptr + RIO_EM_PW_TGT_DEVID, rio_enum_peer()
728 rio_init_em(rdev); rio_enum_peer()
734 rdev->destid = sw_destid; rio_enum_peer()
737 rio_name(rdev), rdev->vid, rdev->did); rio_enum_peer()
776 struct rio_dev *rdev; rio_disc_peer() local
780 if ((rdev = rio_setup_device(net, port, destid, hopcount, 0))) { rio_disc_peer()
782 list_add_tail(&rdev->net_list, &net->devices); rio_disc_peer()
783 rdev->prev = prev; rio_disc_peer()
785 prev->rswitch->nextdev[prev_port] = rdev; rio_disc_peer()
789 if (rio_is_switch(rdev)) { rio_disc_peer()
791 rdev->destid = destid; rio_disc_peer()
795 rio_name(rdev), rdev->vid, rdev->did, rio_disc_peer()
796 RIO_GET_TOTAL_PORTS(rdev->swpinfo)); rio_disc_peer()
798 port_num < RIO_GET_TOTAL_PORTS(rdev->swpinfo); rio_disc_peer()
800 if (RIO_GET_PORT_NUM(rdev->swpinfo) == port_num) rio_disc_peer()
814 rio_route_get_entry(rdev, rio_disc_peer()
826 hopcount + 1, rdev, port_num) < 0) rio_disc_peer()
832 rio_name(rdev), rdev->vid, rdev->did); rio_disc_peer()
924 struct rio_dev *rdev, *swrdev; rio_update_route_tables() local
929 list_for_each_entry(rdev, &net->devices, net_list) { rio_update_route_tables()
931 destid = rdev->destid; rio_update_route_tables()
935 if (rio_is_switch(rdev) && (rdev->rswitch == rswitch)) rio_update_route_tables()
957 * @rdev: RIO device
962 static void rio_init_em(struct rio_dev *rdev) rio_init_em() argument
964 if (rio_is_switch(rdev) && (rdev->em_efptr) && rio_init_em()
965 rdev->rswitch->ops && rdev->rswitch->ops->em_init) { rio_init_em()
966 rdev->rswitch->ops->em_init(rdev); rio_init_em()
1072 struct rio_dev *rdev; rio_build_route_tables() local
1077 rdev = sw_to_rio_dev(rswitch); rio_build_route_tables()
1079 rio_lock_device(net->hport, rdev->destid, rio_build_route_tables()
1080 rdev->hopcount, 1000); rio_build_route_tables()
1084 if (rio_route_get_entry(rdev, RIO_GLOBAL_TABLE, rio_build_route_tables()
1090 rio_unlock_device(net->hport, rdev->destid, rdev->hopcount); rio_build_route_tables()
H A Drio-sysfs.c26 struct rio_dev *rdev = to_rio_dev(dev); \
28 return sprintf(buf, format_string, rdev->field); \
43 struct rio_dev *rdev = to_rio_dev(dev); routes_show() local
47 for (i = 0; i < RIO_MAX_ROUTE_ENTRIES(rdev->net->hport->sys_size); routes_show()
49 if (rdev->rswitch->route_table[i] == RIO_INVALID_ROUTE) routes_show()
53 rdev->rswitch->route_table[i]); routes_show()
63 struct rio_dev *rdev = to_rio_dev(dev); lprev_show() local
66 (rdev->prev) ? rio_name(rdev->prev) : "root"); lprev_show()
73 struct rio_dev *rdev = to_rio_dev(dev); lnext_show() local
77 if (rdev->pef & RIO_PEF_SWITCH) { lnext_show()
78 for (i = 0; i < RIO_GET_TOTAL_PORTS(rdev->swpinfo); i++) { lnext_show()
79 if (rdev->rswitch->nextdev[i]) lnext_show()
81 rio_name(rdev->rswitch->nextdev[i])); lnext_show()
94 struct rio_dev *rdev = to_rio_dev(dev); modalias_show() local
97 rdev->vid, rdev->did, rdev->asm_vid, rdev->asm_did); modalias_show()
266 * @rdev: device whose entries should be created
268 * Create files when @rdev is added to sysfs.
270 int rio_create_sysfs_dev_files(struct rio_dev *rdev) rio_create_sysfs_dev_files() argument
274 err = device_create_bin_file(&rdev->dev, &rio_config_attr); rio_create_sysfs_dev_files()
276 if (!err && (rdev->pef & RIO_PEF_SWITCH)) { rio_create_sysfs_dev_files()
277 err |= device_create_file(&rdev->dev, &dev_attr_routes); rio_create_sysfs_dev_files()
278 err |= device_create_file(&rdev->dev, &dev_attr_lnext); rio_create_sysfs_dev_files()
279 err |= device_create_file(&rdev->dev, &dev_attr_hopcount); rio_create_sysfs_dev_files()
284 rio_name(rdev)); rio_create_sysfs_dev_files()
291 * @rdev: device whose entries we should free
293 * Cleanup when @rdev is removed from sysfs.
295 void rio_remove_sysfs_dev_files(struct rio_dev *rdev) rio_remove_sysfs_dev_files() argument
297 device_remove_bin_file(&rdev->dev, &rio_config_attr); rio_remove_sysfs_dev_files()
298 if (rdev->pef & RIO_PEF_SWITCH) { rio_remove_sysfs_dev_files()
299 device_remove_file(&rdev->dev, &dev_attr_routes); rio_remove_sysfs_dev_files()
300 device_remove_file(&rdev->dev, &dev_attr_lnext); rio_remove_sysfs_dev_files()
301 device_remove_file(&rdev->dev, &dev_attr_hopcount); rio_remove_sysfs_dev_files()
H A Drio.c72 * @rdev: RIO device
78 int rio_add_device(struct rio_dev *rdev) rio_add_device() argument
82 err = device_add(&rdev->dev); rio_add_device()
87 list_add_tail(&rdev->global_list, &rio_devices); rio_add_device()
90 rio_create_sysfs_dev_files(rdev); rio_add_device()
353 * @rdev: RIO device from which to allocate the doorbell resource
360 struct resource *rio_request_outb_dbell(struct rio_dev *rdev, u16 start, rio_request_outb_dbell() argument
369 if (request_resource(&rdev->riores[RIO_DOORBELL_RESOURCE], res) rio_request_outb_dbell()
381 * @rdev: RIO device from which to release the doorbell resource
387 int rio_release_outb_dbell(struct rio_dev *rdev, struct resource *res) rio_release_outb_dbell() argument
398 * @rdev: RIO device to which register inbound port-write callback routine
404 int rio_request_inb_pwrite(struct rio_dev *rdev, rio_request_inb_pwrite() argument
405 int (*pwcback)(struct rio_dev *rdev, union rio_pw_msg *msg, int step)) rio_request_inb_pwrite()
410 if (rdev->pwcback != NULL) rio_request_inb_pwrite()
413 rdev->pwcback = pwcback; rio_request_inb_pwrite()
422 * @rdev: RIO device which registered for inbound port-write callback
427 int rio_release_inb_pwrite(struct rio_dev *rdev) rio_release_inb_pwrite() argument
432 if (rdev->pwcback) { rio_release_inb_pwrite()
433 rdev->pwcback = NULL; rio_release_inb_pwrite()
550 struct rio_dev *rdev; rio_get_comptag() local
556 rdev = rio_dev_g(n); rio_get_comptag()
557 if (rdev->comp_tag == comp_tag) rio_get_comptag()
561 rdev = NULL; rio_get_comptag()
564 return rdev; rio_get_comptag()
570 * @rdev: Pointer to RIO device control structure
574 int rio_set_port_lockout(struct rio_dev *rdev, u32 pnum, int lock) rio_set_port_lockout() argument
578 rio_read_config_32(rdev, rio_set_port_lockout()
579 rdev->phys_efptr + RIO_PORT_N_CTL_CSR(pnum), rio_set_port_lockout()
586 rio_write_config_32(rdev, rio_set_port_lockout()
587 rdev->phys_efptr + RIO_PORT_N_CTL_CSR(pnum), rio_set_port_lockout()
657 * @rdev: RIO device that failed to respond
658 * @nrdev: Last active device on the route to rdev
659 * @npnum: nrdev's port number on the route to rdev
665 rio_chk_dev_route(struct rio_dev *rdev, struct rio_dev **nrdev, int *npnum) rio_chk_dev_route() argument
672 while (rdev->prev && (rdev->prev->pef & RIO_PEF_SWITCH)) { rio_chk_dev_route()
673 if (!rio_read_config_32(rdev->prev, RIO_DEV_ID_CAR, &result)) { rio_chk_dev_route()
674 prev = rdev->prev; rio_chk_dev_route()
677 rdev = rdev->prev; rio_chk_dev_route()
683 p_port = prev->rswitch->route_table[rdev->destid]; rio_chk_dev_route()
692 pr_debug("RIO: failed to trace route to %s\n", rio_name(rdev)); rio_chk_dev_route()
723 * @rdev: Pointer to RIO device control structure
725 static int rio_chk_dev_access(struct rio_dev *rdev) rio_chk_dev_access() argument
727 return rio_mport_chk_dev_access(rdev->net->hport, rio_chk_dev_access()
728 rdev->destid, rdev->hopcount); rio_chk_dev_access()
734 * @rdev: RIO device to issue Input-status command
739 rio_get_input_status(struct rio_dev *rdev, int pnum, u32 *lnkresp) rio_get_input_status() argument
747 rio_read_config_32(rdev, rio_get_input_status()
748 rdev->phys_efptr + RIO_PORT_N_MNT_RSP_CSR(pnum), rio_get_input_status()
754 rio_write_config_32(rdev, rio_get_input_status()
755 rdev->phys_efptr + RIO_PORT_N_MNT_REQ_CSR(pnum), rio_get_input_status()
765 rio_read_config_32(rdev, rio_get_input_status()
766 rdev->phys_efptr + RIO_PORT_N_MNT_RSP_CSR(pnum), rio_get_input_status()
779 * @rdev: Pointer to RIO device control structure
783 static int rio_clr_err_stopped(struct rio_dev *rdev, u32 pnum, u32 err_status) rio_clr_err_stopped() argument
785 struct rio_dev *nextdev = rdev->rswitch->nextdev[pnum]; rio_clr_err_stopped()
790 rio_read_config_32(rdev, rio_clr_err_stopped()
791 rdev->phys_efptr + RIO_PORT_N_ERR_STS_CSR(pnum), rio_clr_err_stopped()
799 if (rio_get_input_status(rdev, pnum, &regval)) { rio_clr_err_stopped()
808 rio_read_config_32(rdev, rio_clr_err_stopped()
809 rdev->phys_efptr + RIO_PORT_N_ACK_STS_CSR(pnum), rio_clr_err_stopped()
826 rio_write_config_32(rdev, rio_clr_err_stopped()
827 rdev->phys_efptr + RIO_PORT_N_ACK_STS_CSR(pnum), rio_clr_err_stopped()
844 rio_read_config_32(rdev, rio_clr_err_stopped()
845 rdev->phys_efptr + RIO_PORT_N_ERR_STS_CSR(pnum), rio_clr_err_stopped()
856 rio_read_config_32(rdev, rio_clr_err_stopped()
857 rdev->phys_efptr + RIO_PORT_N_ERR_STS_CSR(pnum), rio_clr_err_stopped()
875 struct rio_dev *rdev; rio_inb_pwrite_handler() local
879 rdev = rio_get_comptag((pw_msg->em.comptag & RIO_CTAG_UDEVID), NULL); rio_inb_pwrite_handler()
880 if (rdev == NULL) { rio_inb_pwrite_handler()
887 pr_debug("RIO: Port-Write message from %s\n", rio_name(rdev)); rio_inb_pwrite_handler()
907 if (rdev->pwcback != NULL) { rio_inb_pwrite_handler()
908 rc = rdev->pwcback(rdev, pw_msg, 0); rio_inb_pwrite_handler()
919 if (rio_chk_dev_access(rdev)) { rio_inb_pwrite_handler()
925 if (rio_chk_dev_route(rdev, &rdev, &portnum)) { rio_inb_pwrite_handler()
927 rio_name(rdev)); rio_inb_pwrite_handler()
934 if (!(rdev->pef & RIO_PEF_SWITCH)) rio_inb_pwrite_handler()
937 if (rdev->phys_efptr == 0) { rio_inb_pwrite_handler()
939 rio_name(rdev)); rio_inb_pwrite_handler()
946 if (rdev->rswitch->ops && rdev->rswitch->ops->em_handle) rio_inb_pwrite_handler()
947 rdev->rswitch->ops->em_handle(rdev, portnum); rio_inb_pwrite_handler()
949 rio_read_config_32(rdev, rio_inb_pwrite_handler()
950 rdev->phys_efptr + RIO_PORT_N_ERR_STS_CSR(portnum), rio_inb_pwrite_handler()
956 if (!(rdev->rswitch->port_ok & (1 << portnum))) { rio_inb_pwrite_handler()
957 rdev->rswitch->port_ok |= (1 << portnum); rio_inb_pwrite_handler()
958 rio_set_port_lockout(rdev, portnum, 0); rio_inb_pwrite_handler()
961 rio_name(rdev), portnum); rio_inb_pwrite_handler()
970 if (rio_clr_err_stopped(rdev, portnum, err_status)) rio_inb_pwrite_handler()
971 rio_clr_err_stopped(rdev, portnum, 0); rio_inb_pwrite_handler()
975 if (rdev->rswitch->port_ok & (1 << portnum)) { rio_inb_pwrite_handler()
976 rdev->rswitch->port_ok &= ~(1 << portnum); rio_inb_pwrite_handler()
977 rio_set_port_lockout(rdev, portnum, 1); rio_inb_pwrite_handler()
979 rio_write_config_32(rdev, rio_inb_pwrite_handler()
980 rdev->phys_efptr + rio_inb_pwrite_handler()
986 rio_name(rdev), portnum); rio_inb_pwrite_handler()
990 rio_read_config_32(rdev, rio_inb_pwrite_handler()
991 rdev->em_efptr + RIO_EM_PN_ERR_DETECT(portnum), &em_perrdet); rio_inb_pwrite_handler()
996 rio_write_config_32(rdev, rio_inb_pwrite_handler()
997 rdev->em_efptr + RIO_EM_PN_ERR_DETECT(portnum), 0); rio_inb_pwrite_handler()
1000 rio_read_config_32(rdev, rio_inb_pwrite_handler()
1001 rdev->em_efptr + RIO_EM_LTL_ERR_DETECT, &em_ltlerrdet); rio_inb_pwrite_handler()
1006 rio_write_config_32(rdev, rio_inb_pwrite_handler()
1007 rdev->em_efptr + RIO_EM_LTL_ERR_DETECT, 0); rio_inb_pwrite_handler()
1011 rio_write_config_32(rdev, rio_inb_pwrite_handler()
1012 rdev->phys_efptr + RIO_PORT_N_ERR_STS_CSR(portnum), rio_inb_pwrite_handler()
1131 struct rio_dev *rdev; rio_get_asm() local
1138 rdev = rio_dev_g(n); rio_get_asm()
1139 if ((vid == RIO_ANY_ID || rdev->vid == vid) && rio_get_asm()
1140 (did == RIO_ANY_ID || rdev->did == did) && rio_get_asm()
1141 (asm_vid == RIO_ANY_ID || rdev->asm_vid == asm_vid) && rio_get_asm()
1142 (asm_did == RIO_ANY_ID || rdev->asm_did == asm_did)) rio_get_asm()
1146 rdev = NULL; rio_get_asm()
1149 rdev = rio_dev_get(rdev); rio_get_asm()
1151 return rdev; rio_get_asm()
1357 * @rdev: RIO device
1372 int rio_route_add_entry(struct rio_dev *rdev, rio_route_add_entry() argument
1376 struct rio_switch_ops *ops = rdev->rswitch->ops; rio_route_add_entry()
1379 rc = rio_lock_device(rdev->net->hport, rdev->destid, rio_route_add_entry()
1380 rdev->hopcount, 1000); rio_route_add_entry()
1385 spin_lock(&rdev->rswitch->lock); rio_route_add_entry()
1388 rc = rio_std_route_add_entry(rdev->net->hport, rdev->destid, rio_route_add_entry()
1389 rdev->hopcount, table, rio_route_add_entry()
1392 rc = ops->add_entry(rdev->net->hport, rdev->destid, rio_route_add_entry()
1393 rdev->hopcount, table, route_destid, rio_route_add_entry()
1398 spin_unlock(&rdev->rswitch->lock); rio_route_add_entry()
1401 rio_unlock_device(rdev->net->hport, rdev->destid, rio_route_add_entry()
1402 rdev->hopcount); rio_route_add_entry()
1410 * @rdev: RIO device
1425 int rio_route_get_entry(struct rio_dev *rdev, u16 table, rio_route_get_entry() argument
1429 struct rio_switch_ops *ops = rdev->rswitch->ops; rio_route_get_entry()
1432 rc = rio_lock_device(rdev->net->hport, rdev->destid, rio_route_get_entry()
1433 rdev->hopcount, 1000); rio_route_get_entry()
1438 spin_lock(&rdev->rswitch->lock); rio_route_get_entry()
1441 rc = rio_std_route_get_entry(rdev->net->hport, rdev->destid, rio_route_get_entry()
1442 rdev->hopcount, table, rio_route_get_entry()
1445 rc = ops->get_entry(rdev->net->hport, rdev->destid, rio_route_get_entry()
1446 rdev->hopcount, table, route_destid, rio_route_get_entry()
1451 spin_unlock(&rdev->rswitch->lock); rio_route_get_entry()
1454 rio_unlock_device(rdev->net->hport, rdev->destid, rio_route_get_entry()
1455 rdev->hopcount); rio_route_get_entry()
1462 * @rdev: RIO device
1474 int rio_route_clr_table(struct rio_dev *rdev, u16 table, int lock) rio_route_clr_table() argument
1477 struct rio_switch_ops *ops = rdev->rswitch->ops; rio_route_clr_table()
1480 rc = rio_lock_device(rdev->net->hport, rdev->destid, rio_route_clr_table()
1481 rdev->hopcount, 1000); rio_route_clr_table()
1486 spin_lock(&rdev->rswitch->lock); rio_route_clr_table()
1489 rc = rio_std_route_clr_table(rdev->net->hport, rdev->destid, rio_route_clr_table()
1490 rdev->hopcount, table); rio_route_clr_table()
1492 rc = ops->clr_table(rdev->net->hport, rdev->destid, rio_route_clr_table()
1493 rdev->hopcount, table); rio_route_clr_table()
1498 spin_unlock(&rdev->rswitch->lock); rio_route_clr_table()
1501 rio_unlock_device(rdev->net->hport, rdev->destid, rio_route_clr_table()
1502 rdev->hopcount); rio_route_clr_table()
1538 * @rdev: RIO device associated with DMA transfer
1542 struct dma_chan *rio_request_dma(struct rio_dev *rdev) rio_request_dma() argument
1544 return rio_request_mport_dma(rdev->net->hport); rio_request_dma()
1596 * @rdev: RIO device control structure
1607 struct dma_async_tx_descriptor *rio_dma_prep_slave_sg(struct rio_dev *rdev, rio_dma_prep_slave_sg() argument
1611 return rio_dma_prep_xfer(dchan, rdev->destid, data, direction, flags); rio_dma_prep_slave_sg()
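
rio_request_inb_pwrite() above installs at most one inbound port-write callback per device (it fails when rdev->pwcback is already set) and rio_release_inb_pwrite() clears it again. A plausible registration sequence, assuming a hypothetical handler (the exact errno on double registration is not visible in the fragments):

static int my_pw_handler(struct rio_dev *rdev, union rio_pw_msg *msg, int step)
{
        /* inspect msg, run recovery on the affected port, etc. */
        return 0;
}

static int my_attach(struct rio_dev *rdev)
{
        int err;

        err = rio_request_inb_pwrite(rdev, my_pw_handler);
        if (err)
                return err;     /* another callback is already installed */
        return 0;
}

static void my_detach(struct rio_dev *rdev)
{
        rio_release_inb_pwrite(rdev);
}
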
H A Drio.h30 extern int rio_create_sysfs_dev_files(struct rio_dev *rdev);
34 extern int rio_route_add_entry(struct rio_dev *rdev,
36 extern int rio_route_get_entry(struct rio_dev *rdev, u16 table,
38 extern int rio_route_clr_table(struct rio_dev *rdev, u16 table, int lock);
39 extern int rio_set_port_lockout(struct rio_dev *rdev, u32 pnum, int lock);
41 extern int rio_add_device(struct rio_dev *rdev);
46 extern void rio_attach_device(struct rio_dev *rdev);
/linux-4.4.14/drivers/hid/
H A Dhid-picolcd_cir.c108 struct rc_dev *rdev; picolcd_init_cir() local
111 rdev = rc_allocate_device(); picolcd_init_cir()
112 if (!rdev) picolcd_init_cir()
115 rdev->priv = data; picolcd_init_cir()
116 rdev->driver_type = RC_DRIVER_IR_RAW; picolcd_init_cir()
117 rdev->allowed_protocols = RC_BIT_ALL; picolcd_init_cir()
118 rdev->open = picolcd_cir_open; picolcd_init_cir()
119 rdev->close = picolcd_cir_close; picolcd_init_cir()
120 rdev->input_name = data->hdev->name; picolcd_init_cir()
121 rdev->input_phys = data->hdev->phys; picolcd_init_cir()
122 rdev->input_id.bustype = data->hdev->bus; picolcd_init_cir()
123 rdev->input_id.vendor = data->hdev->vendor; picolcd_init_cir()
124 rdev->input_id.product = data->hdev->product; picolcd_init_cir()
125 rdev->input_id.version = data->hdev->version; picolcd_init_cir()
126 rdev->dev.parent = &data->hdev->dev; picolcd_init_cir()
127 rdev->driver_name = PICOLCD_NAME; picolcd_init_cir()
128 rdev->map_name = RC_MAP_RC6_MCE; picolcd_init_cir()
129 rdev->timeout = MS_TO_NS(100); picolcd_init_cir()
130 rdev->rx_resolution = US_TO_NS(1); picolcd_init_cir()
132 ret = rc_register_device(rdev); picolcd_init_cir()
135 data->rc_dev = rdev; picolcd_init_cir()
139 rc_free_device(rdev); picolcd_init_cir()
145 struct rc_dev *rdev = data->rc_dev; picolcd_exit_cir() local
148 rc_unregister_device(rdev); picolcd_exit_cir()
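
picolcd_init_cir() above shows the 4.4-era rc-core registration sequence: rc_allocate_device() (no arguments in this kernel), field setup, rc_register_device(), and rc_free_device() on the failure path only; once registration succeeds, teardown goes through rc_unregister_device() instead, as picolcd_exit_cir() does. The skeleton, with illustrative field values (struct my_data and the names are hypothetical, not the PicoLCD driver's):

static int init_cir_sketch(struct my_data *data)
{
        struct rc_dev *rdev;
        int ret;

        rdev = rc_allocate_device();
        if (!rdev)
                return -ENOMEM;

        rdev->priv = data;
        rdev->driver_type = RC_DRIVER_IR_RAW;
        rdev->allowed_protocols = RC_BIT_ALL;
        rdev->driver_name = "my_cir";           /* hypothetical name */
        rdev->map_name = RC_MAP_RC6_MCE;

        ret = rc_register_device(rdev);
        if (ret) {
                rc_free_device(rdev);   /* never free after registration */
                return ret;
        }
        data->rc_dev = rdev;
        return 0;
}
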
/linux-4.4.14/drivers/gpu/drm/amd/amdgpu/
H A Damdgpu_amdkfd.c46 bool amdgpu_amdkfd_load_interface(struct amdgpu_device *rdev) amdgpu_amdkfd_load_interface() argument
52 switch (rdev->asic_type) { amdgpu_amdkfd_load_interface()
104 void amdgpu_amdkfd_device_probe(struct amdgpu_device *rdev) amdgpu_amdkfd_device_probe() argument
107 rdev->kfd = kgd2kfd->probe((struct kgd_dev *)rdev, amdgpu_amdkfd_device_probe()
108 rdev->pdev, kfd2kgd); amdgpu_amdkfd_device_probe()
111 void amdgpu_amdkfd_device_init(struct amdgpu_device *rdev) amdgpu_amdkfd_device_init() argument
113 if (rdev->kfd) { amdgpu_amdkfd_device_init()
121 amdgpu_doorbell_get_kfd_info(rdev, amdgpu_amdkfd_device_init()
126 kgd2kfd->device_init(rdev->kfd, &gpu_resources); amdgpu_amdkfd_device_init()
130 void amdgpu_amdkfd_device_fini(struct amdgpu_device *rdev) amdgpu_amdkfd_device_fini() argument
132 if (rdev->kfd) { amdgpu_amdkfd_device_fini()
133 kgd2kfd->device_exit(rdev->kfd); amdgpu_amdkfd_device_fini()
134 rdev->kfd = NULL; amdgpu_amdkfd_device_fini()
138 void amdgpu_amdkfd_interrupt(struct amdgpu_device *rdev, amdgpu_amdkfd_interrupt() argument
141 if (rdev->kfd) amdgpu_amdkfd_interrupt()
142 kgd2kfd->interrupt(rdev->kfd, ih_ring_entry); amdgpu_amdkfd_interrupt()
145 void amdgpu_amdkfd_suspend(struct amdgpu_device *rdev) amdgpu_amdkfd_suspend() argument
147 if (rdev->kfd) amdgpu_amdkfd_suspend()
148 kgd2kfd->suspend(rdev->kfd); amdgpu_amdkfd_suspend()
151 int amdgpu_amdkfd_resume(struct amdgpu_device *rdev) amdgpu_amdkfd_resume() argument
155 if (rdev->kfd) amdgpu_amdkfd_resume()
156 r = kgd2kfd->resume(rdev->kfd); amdgpu_amdkfd_resume()
173 struct amdgpu_device *rdev = (struct amdgpu_device *)kgd; alloc_gtt_mem() local
185 r = amdgpu_bo_create(rdev, size, PAGE_SIZE, true, AMDGPU_GEM_DOMAIN_GTT, alloc_gtt_mem()
188 dev_err(rdev->dev, alloc_gtt_mem()
196 dev_err(rdev->dev, "(%d) failed to reserve bo for amdkfd\n", r); alloc_gtt_mem()
203 dev_err(rdev->dev, "(%d) failed to pin bo for amdkfd\n", r); alloc_gtt_mem()
210 dev_err(rdev->dev, alloc_gtt_mem()
246 struct amdgpu_device *rdev = get_vmem_size() local
251 return rdev->mc.real_vram_size; get_vmem_size()
256 struct amdgpu_device *rdev = (struct amdgpu_device *)kgd; get_gpu_clock_counter() local
258 if (rdev->asic_funcs->get_gpu_clock_counter) get_gpu_clock_counter()
259 return rdev->asic_funcs->get_gpu_clock_counter(rdev); get_gpu_clock_counter()
265 struct amdgpu_device *rdev = (struct amdgpu_device *)kgd; get_max_engine_clock_in_mhz() local
268 return rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk / 100; get_max_engine_clock_in_mhz()
H A Damdgpu_amdkfd.h42 bool amdgpu_amdkfd_load_interface(struct amdgpu_device *rdev);
44 void amdgpu_amdkfd_suspend(struct amdgpu_device *rdev);
45 int amdgpu_amdkfd_resume(struct amdgpu_device *rdev);
46 void amdgpu_amdkfd_interrupt(struct amdgpu_device *rdev,
48 void amdgpu_amdkfd_device_probe(struct amdgpu_device *rdev);
49 void amdgpu_amdkfd_device_init(struct amdgpu_device *rdev);
50 void amdgpu_amdkfd_device_fini(struct amdgpu_device *rdev);
/linux-4.4.14/drivers/md/
H A Dlinear.h5 struct md_rdev *rdev; member in struct:dev_info
H A Dmd.c84 * Default number of read corrections we'll attempt on an rdev
368 struct md_rdev *rdev = bio->bi_private; md_end_flush() local
369 struct mddev *mddev = rdev->mddev; md_end_flush()
371 rdev_dec_pending(rdev, mddev); md_end_flush()
385 struct md_rdev *rdev; submit_flushes() local
390 rdev_for_each_rcu(rdev, mddev) rdev_for_each_rcu()
391 if (rdev->raid_disk >= 0 && rdev_for_each_rcu()
392 !test_bit(Faulty, &rdev->flags)) { rdev_for_each_rcu()
398 atomic_inc(&rdev->nr_pending); rdev_for_each_rcu()
399 atomic_inc(&rdev->nr_pending); rdev_for_each_rcu()
403 bi->bi_private = rdev; rdev_for_each_rcu()
404 bi->bi_bdev = rdev->bdev; rdev_for_each_rcu()
408 rdev_dec_pending(rdev, mddev); rdev_for_each_rcu()
640 struct md_rdev *rdev; md_find_rdev_nr_rcu() local
642 rdev_for_each_rcu(rdev, mddev) md_find_rdev_nr_rcu()
643 if (rdev->desc_nr == nr) md_find_rdev_nr_rcu()
644 return rdev; md_find_rdev_nr_rcu()
652 struct md_rdev *rdev; find_rdev() local
654 rdev_for_each(rdev, mddev) find_rdev()
655 if (rdev->bdev->bd_dev == dev) find_rdev()
656 return rdev; find_rdev()
663 struct md_rdev *rdev; find_rdev_rcu() local
665 rdev_for_each_rcu(rdev, mddev) find_rdev_rcu()
666 if (rdev->bdev->bd_dev == dev) find_rdev_rcu()
667 return rdev; find_rdev_rcu()
685 static inline sector_t calc_dev_sboffset(struct md_rdev *rdev) calc_dev_sboffset() argument
687 sector_t num_sectors = i_size_read(rdev->bdev->bd_inode) / 512; calc_dev_sboffset()
691 static int alloc_disk_sb(struct md_rdev *rdev) alloc_disk_sb() argument
693 rdev->sb_page = alloc_page(GFP_KERNEL); alloc_disk_sb()
694 if (!rdev->sb_page) { alloc_disk_sb()
702 void md_rdev_clear(struct md_rdev *rdev) md_rdev_clear() argument
704 if (rdev->sb_page) { md_rdev_clear()
705 put_page(rdev->sb_page); md_rdev_clear()
706 rdev->sb_loaded = 0; md_rdev_clear()
707 rdev->sb_page = NULL; md_rdev_clear()
708 rdev->sb_start = 0; md_rdev_clear()
709 rdev->sectors = 0; md_rdev_clear()
711 if (rdev->bb_page) { md_rdev_clear()
712 put_page(rdev->bb_page); md_rdev_clear()
713 rdev->bb_page = NULL; md_rdev_clear()
715 kfree(rdev->badblocks.page); md_rdev_clear()
716 rdev->badblocks.page = NULL; md_rdev_clear()
722 struct md_rdev *rdev = bio->bi_private; super_written() local
723 struct mddev *mddev = rdev->mddev; super_written()
727 md_error(mddev, rdev); super_written()
735 void md_super_write(struct mddev *mddev, struct md_rdev *rdev, md_super_write() argument
738 /* write first size bytes of page to sector of rdev md_super_write()
746 bio->bi_bdev = rdev->meta_bdev ? rdev->meta_bdev : rdev->bdev; md_super_write()
749 bio->bi_private = rdev; md_super_write()
762 int sync_page_io(struct md_rdev *rdev, sector_t sector, int size, sync_page_io() argument
765 struct bio *bio = bio_alloc_mddev(GFP_NOIO, 1, rdev->mddev); sync_page_io()
768 bio->bi_bdev = (metadata_op && rdev->meta_bdev) ? sync_page_io()
769 rdev->meta_bdev : rdev->bdev; sync_page_io()
771 bio->bi_iter.bi_sector = sector + rdev->sb_start; sync_page_io()
772 else if (rdev->mddev->reshape_position != MaxSector && sync_page_io()
773 (rdev->mddev->reshape_backwards == sync_page_io()
774 (sector >= rdev->mddev->reshape_position))) sync_page_io()
775 bio->bi_iter.bi_sector = sector + rdev->new_data_offset; sync_page_io()
777 bio->bi_iter.bi_sector = sector + rdev->data_offset; sync_page_io()
787 static int read_disk_sb(struct md_rdev *rdev, int size) read_disk_sb() argument
791 if (rdev->sb_loaded) read_disk_sb()
794 if (!sync_page_io(rdev, 0, size, rdev->sb_page, READ, true)) read_disk_sb()
796 rdev->sb_loaded = 1; read_disk_sb()
801 bdevname(rdev->bdev,b)); read_disk_sb()
904 * Update the superblock for rdev with data in mddev
912 int (*load_super)(struct md_rdev *rdev,
916 struct md_rdev *rdev);
918 struct md_rdev *rdev);
919 unsigned long long (*rdev_size_change)(struct md_rdev *rdev,
921 int (*allow_new_offset)(struct md_rdev *rdev,
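
The members listed above (load_super, validate_super, sync_super, rdev_size_change, allow_new_offset) form md's superblock ops table: one entry per metadata format, dispatched as super_types[mddev->major_version].sync_super(...) further down (line 1941). A small runnable sketch of the same version-indexed function-pointer table; the names are illustrative, not the kernel's:

#include <stdio.h>

struct sb_ops {
	const char *name;
	int (*load)(int minor_version);
};

static int load_v090(int mv) { printf("0.90 superblock, minor %d\n", mv); return 0; }
static int load_v1(int mv)   { printf("1.x superblock, minor %d\n", mv); return 0; }

/* indexed by metadata major version, like super_types[] in md.c */
static const struct sb_ops sb_types[] = {
	[0] = { "0.90000", load_v090 },
	[1] = { "md-1",    load_v1   },
};

int main(void)
{
	int major = 1, minor = 2;            /* e.g. metadata 1.2 */
	return sb_types[major].load(minor);
}
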
946 static int super_90_load(struct md_rdev *rdev, struct md_rdev *refdev, int minor_version) super_90_load() argument
958 rdev->sb_start = calc_dev_sboffset(rdev); super_90_load()
960 ret = read_disk_sb(rdev, MD_SB_BYTES); super_90_load()
965 bdevname(rdev->bdev, b); super_90_load()
966 sb = page_address(rdev->sb_page); super_90_load()
992 rdev->preferred_minor = sb->md_minor; super_90_load()
993 rdev->data_offset = 0; super_90_load()
994 rdev->new_data_offset = 0; super_90_load()
995 rdev->sb_size = MD_SB_BYTES; super_90_load()
996 rdev->badblocks.shift = -1; super_90_load()
999 rdev->desc_nr = -1; super_90_load()
1001 rdev->desc_nr = sb->this_disk.number; super_90_load()
1026 rdev->sectors = rdev->sb_start; super_90_load()
1031 if (rdev->sectors >= (2ULL << 32) && sb->level >= 1) super_90_load()
1032 rdev->sectors = (2ULL << 32) - 2; super_90_load()
1034 if (rdev->sectors < ((sector_t)sb->size) * 2 && sb->level >= 1) super_90_load()
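
The (2ULL << 32) guard in super_90_load() above caps 0.90-format members at 2^33 sectors. Our reading (treat the field-width rationale as an assumption): the legacy superblock stores device sizes in 1 KiB units in 32-bit fields, so 2^32 KiB = 4 TiB is unrepresentable, and oversized components are clamped two sectors (1 KiB) short of the limit. The arithmetic, checkable stand-alone:

#include <stdio.h>

int main(void)
{
	unsigned long long cap_sectors = 2ULL << 32;        /* 2^33 sectors */
	unsigned long long cap_bytes   = cap_sectors * 512; /* 2^42 bytes = 4 TiB */

	printf("cap: %llu sectors = %llu bytes = %llu KiB\n",
	       cap_sectors, cap_bytes, cap_bytes >> 10);    /* 2^32 KiB: overflows a u32 */
	printf("clamped size: %llu sectors\n", (2ULL << 32) - 2);
	return 0;
}
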
1045 static int super_90_validate(struct mddev *mddev, struct md_rdev *rdev) super_90_validate() argument
1048 mdp_super_t *sb = page_address(rdev->sb_page); super_90_validate()
1051 rdev->raid_disk = -1; super_90_validate()
1052 clear_bit(Faulty, &rdev->flags); super_90_validate()
1053 clear_bit(In_sync, &rdev->flags); super_90_validate()
1054 clear_bit(Bitmap_sync, &rdev->flags); super_90_validate()
1055 clear_bit(WriteMostly, &rdev->flags); super_90_validate()
1123 if (sb->disks[rdev->desc_nr].state & ( super_90_validate()
1134 set_bit(Bitmap_sync, &rdev->flags); super_90_validate()
1142 desc = sb->disks + rdev->desc_nr; super_90_validate()
1145 set_bit(Faulty, &rdev->flags); super_90_validate()
1148 set_bit(In_sync, &rdev->flags); super_90_validate()
1149 rdev->raid_disk = desc->raid_disk; super_90_validate()
1150 rdev->saved_raid_disk = desc->raid_disk; super_90_validate()
1156 rdev->recovery_offset = 0; super_90_validate()
1157 rdev->raid_disk = desc->raid_disk; super_90_validate()
1161 set_bit(WriteMostly, &rdev->flags); super_90_validate()
1163 set_bit(In_sync, &rdev->flags); super_90_validate()
1170 static void super_90_sync(struct mddev *mddev, struct md_rdev *rdev) super_90_sync() argument
1176 /* make rdev->sb match mddev data.. super_90_sync()
1189 rdev->sb_size = MD_SB_BYTES; super_90_sync()
1191 sb = page_address(rdev->sb_page); super_90_sync()
1305 sb->this_disk = sb->disks[rdev->desc_nr];
1313 super_90_rdev_size_change(struct md_rdev *rdev, sector_t num_sectors) super_90_rdev_size_change() argument
1315 if (num_sectors && num_sectors < rdev->mddev->dev_sectors) super_90_rdev_size_change()
1317 if (rdev->mddev->bitmap_info.offset) super_90_rdev_size_change()
1319 rdev->sb_start = calc_dev_sboffset(rdev); super_90_rdev_size_change()
1320 if (!num_sectors || num_sectors > rdev->sb_start) super_90_rdev_size_change()
1321 num_sectors = rdev->sb_start; super_90_rdev_size_change()
1325 if (num_sectors >= (2ULL << 32) && rdev->mddev->level >= 1) super_90_rdev_size_change()
1327 md_super_write(rdev->mddev, rdev, rdev->sb_start, rdev->sb_size, super_90_rdev_size_change()
1328 rdev->sb_page); super_90_rdev_size_change()
1329 md_super_wait(rdev->mddev); super_90_rdev_size_change()
1334 super_90_allow_new_offset(struct md_rdev *rdev, unsigned long long new_offset) super_90_allow_new_offset() argument
1368 static int super_1_load(struct md_rdev *rdev, struct md_rdev *refdev, int minor_version) super_1_load() argument
1387 sb_start = i_size_read(rdev->bdev->bd_inode) >> 9; super_1_load()
1400 rdev->sb_start = sb_start; super_1_load()
1405 ret = read_disk_sb(rdev, 4096); super_1_load()
1408 sb = page_address(rdev->sb_page); super_1_load()
1413 le64_to_cpu(sb->super_offset) != rdev->sb_start || super_1_load()
1419 bdevname(rdev->bdev,b)); super_1_load()
1424 bdevname(rdev->bdev,b)); super_1_load()
1433 rdev->preferred_minor = 0xffff; super_1_load()
1434 rdev->data_offset = le64_to_cpu(sb->data_offset); super_1_load()
1435 rdev->new_data_offset = rdev->data_offset; super_1_load()
1438 rdev->new_data_offset += (s32)le32_to_cpu(sb->new_offset); super_1_load()
1439 atomic_set(&rdev->corrected_errors, le32_to_cpu(sb->cnt_corrected_read)); super_1_load()
1441 rdev->sb_size = le32_to_cpu(sb->max_dev) * 2 + 256; super_1_load()
1442 bmask = queue_logical_block_size(rdev->bdev->bd_disk->queue)-1; super_1_load()
1443 if (rdev->sb_size & bmask) super_1_load()
1444 rdev->sb_size = (rdev->sb_size | bmask) + 1; super_1_load()
1447 && rdev->data_offset < sb_start + (rdev->sb_size/512)) super_1_load()
1450 && rdev->new_data_offset < sb_start + (rdev->sb_size/512)) super_1_load()
1454 rdev->desc_nr = -1; super_1_load()
1456 rdev->desc_nr = le32_to_cpu(sb->dev_number); super_1_load()
1458 if (!rdev->bb_page) { super_1_load()
1459 rdev->bb_page = alloc_page(GFP_KERNEL); super_1_load()
1460 if (!rdev->bb_page) super_1_load()
1464 rdev->badblocks.count == 0) { super_1_load()
1479 if (!sync_page_io(rdev, bb_sector, sectors << 9, super_1_load()
1480 rdev->bb_page, READ, true)) super_1_load()
1482 bbp = (u64 *)page_address(rdev->bb_page); super_1_load()
1483 rdev->badblocks.shift = sb->bblog_shift; super_1_load()
1492 if (md_set_badblocks(&rdev->badblocks, super_1_load()
1497 rdev->badblocks.shift = 0; super_1_load()
1511 bdevname(rdev->bdev,b), super_1_load()
1524 sectors = (i_size_read(rdev->bdev->bd_inode) >> 9); super_1_load()
1525 sectors -= rdev->data_offset; super_1_load()
1527 sectors = rdev->sb_start; super_1_load()
1530 rdev->sectors = le64_to_cpu(sb->data_size); super_1_load()
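
super_1_load() above also pulls in the v1.x bad-block log: bb_page is read with sync_page_io() and each 64-bit entry is handed to md_set_badblocks(), scaled by sb->bblog_shift. To the best of our reading of the on-disk format (an assumption — verify before relying on it), each little-endian entry packs the length into the low 10 bits and the start sector into the remaining high bits; a decoding sketch under that layout:

#include <stdint.h>
#include <stdio.h>

/* Assumed v1.x bad-block log entry: sector in bits 10..63, length in
 * bits 0..9, both scaled up by the superblock's bblog_shift. */
static void decode_bb_entry(uint64_t entry, unsigned shift)
{
	uint64_t sector = (entry >> 10) << shift;
	unsigned length = (unsigned)(entry & 0x3ff) << shift;

	printf("bad range: %u sectors at sector %llu\n",
	       length, (unsigned long long)sector);
}

int main(void)
{
	decode_bb_entry(((uint64_t)4096 << 10) | 8, 0);  /* 8 bad sectors at 4096 */
	return 0;
}
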
1534 static int super_1_validate(struct mddev *mddev, struct md_rdev *rdev) super_1_validate() argument
1536 struct mdp_superblock_1 *sb = page_address(rdev->sb_page); super_1_validate()
1539 rdev->raid_disk = -1; super_1_validate()
1540 clear_bit(Faulty, &rdev->flags); super_1_validate()
1541 clear_bit(In_sync, &rdev->flags); super_1_validate()
1542 clear_bit(Bitmap_sync, &rdev->flags); super_1_validate()
1543 clear_bit(WriteMostly, &rdev->flags); super_1_validate()
1614 if (rdev->desc_nr >= 0 && super_1_validate()
1615 rdev->desc_nr < le32_to_cpu(sb->max_dev) && super_1_validate()
1616 (le16_to_cpu(sb->dev_roles[rdev->desc_nr]) < MD_DISK_ROLE_MAX || super_1_validate()
1617 le16_to_cpu(sb->dev_roles[rdev->desc_nr]) == MD_DISK_ROLE_JOURNAL)) super_1_validate()
1627 set_bit(Bitmap_sync, &rdev->flags); super_1_validate()
1635 if (rdev->desc_nr < 0 || super_1_validate()
1636 rdev->desc_nr >= le32_to_cpu(sb->max_dev)) { super_1_validate()
1638 rdev->desc_nr = -1; super_1_validate()
1640 role = le16_to_cpu(sb->dev_roles[rdev->desc_nr]); super_1_validate()
1645 set_bit(Faulty, &rdev->flags); super_1_validate()
1654 set_bit(Journal, &rdev->flags); super_1_validate()
1655 rdev->journal_tail = le64_to_cpu(sb->journal_tail); super_1_validate()
1658 rdev->raid_disk = 0; super_1_validate()
1661 rdev->saved_raid_disk = role; super_1_validate()
1664 rdev->recovery_offset = le64_to_cpu(sb->recovery_offset); super_1_validate()
1667 rdev->saved_raid_disk = -1; super_1_validate()
1669 set_bit(In_sync, &rdev->flags); super_1_validate()
1670 rdev->raid_disk = role; super_1_validate()
1674 set_bit(WriteMostly, &rdev->flags); super_1_validate()
1676 set_bit(Replacement, &rdev->flags); super_1_validate()
1680 set_bit(In_sync, &rdev->flags); super_1_validate()
1685 static void super_1_sync(struct mddev *mddev, struct md_rdev *rdev) super_1_sync() argument
1690 /* make rdev->sb match mddev and rdev data. */ super_1_sync()
1692 sb = page_address(rdev->sb_page); super_1_sync()
1708 sb->cnt_corrected_read = cpu_to_le32(atomic_read(&rdev->corrected_errors)); super_1_sync()
1716 if (test_bit(WriteMostly, &rdev->flags)) super_1_sync()
1720 sb->data_offset = cpu_to_le64(rdev->data_offset); super_1_sync()
1721 sb->data_size = cpu_to_le64(rdev->sectors); super_1_sync()
1728 if (rdev->raid_disk >= 0 && !test_bit(Journal, &rdev->flags) && super_1_sync()
1729 !test_bit(In_sync, &rdev->flags)) { super_1_sync()
1733 cpu_to_le64(rdev->recovery_offset); super_1_sync()
1734 if (rdev->saved_raid_disk >= 0 && mddev->bitmap) super_1_sync()
1739 if (test_bit(Journal, &rdev->flags)) super_1_sync()
1740 sb->journal_tail = cpu_to_le64(rdev->journal_tail); super_1_sync()
1741 if (test_bit(Replacement, &rdev->flags)) super_1_sync()
1756 if (rdev->new_data_offset != rdev->data_offset) { super_1_sync()
1759 sb->new_offset = cpu_to_le32((__u32)(rdev->new_data_offset super_1_sync()
1760 - rdev->data_offset)); super_1_sync()
1767 if (rdev->badblocks.count == 0) super_1_sync()
1771 md_error(mddev, rdev); super_1_sync()
1773 struct badblocks *bb = &rdev->badblocks; super_1_sync()
1774 u64 *bbp = (u64 *)page_address(rdev->bb_page); super_1_sync()
1795 bb->sector = (rdev->sb_start + super_1_sync()
1809 rdev->sb_size = max_dev * 2 + 256; super_1_sync()
1810 bmask = queue_logical_block_size(rdev->bdev->bd_disk->queue)-1; super_1_sync()
1811 if (rdev->sb_size & bmask) super_1_sync()
1812 rdev->sb_size = (rdev->sb_size | bmask) + 1; super_1_sync()
1840 super_1_rdev_size_change(struct md_rdev *rdev, sector_t num_sectors) super_1_rdev_size_change() argument
1844 if (num_sectors && num_sectors < rdev->mddev->dev_sectors) super_1_rdev_size_change()
1846 if (rdev->data_offset != rdev->new_data_offset) super_1_rdev_size_change()
1848 if (rdev->sb_start < rdev->data_offset) { super_1_rdev_size_change()
1850 max_sectors = i_size_read(rdev->bdev->bd_inode) >> 9; super_1_rdev_size_change()
1851 max_sectors -= rdev->data_offset; super_1_rdev_size_change()
1854 } else if (rdev->mddev->bitmap_info.offset) { super_1_rdev_size_change()
1860 sb_start = (i_size_read(rdev->bdev->bd_inode) >> 9) - 8*2; super_1_rdev_size_change()
1862 max_sectors = rdev->sectors + sb_start - rdev->sb_start; super_1_rdev_size_change()
1865 rdev->sb_start = sb_start; super_1_rdev_size_change()
1867 sb = page_address(rdev->sb_page); super_1_rdev_size_change()
1869 sb->super_offset = rdev->sb_start; super_1_rdev_size_change()
1871 md_super_write(rdev->mddev, rdev, rdev->sb_start, rdev->sb_size, super_1_rdev_size_change()
1872 rdev->sb_page); super_1_rdev_size_change()
1873 md_super_wait(rdev->mddev); super_1_rdev_size_change()
1879 super_1_allow_new_offset(struct md_rdev *rdev, super_1_allow_new_offset() argument
1884 if (new_offset >= rdev->data_offset) super_1_allow_new_offset()
1889 if (rdev->mddev->minor_version == 0) super_1_allow_new_offset()
1898 if (rdev->sb_start + (32+4)*2 > new_offset) super_1_allow_new_offset()
1900 bitmap = rdev->mddev->bitmap; super_1_allow_new_offset()
1901 if (bitmap && !rdev->mddev->bitmap_info.file && super_1_allow_new_offset()
1902 rdev->sb_start + rdev->mddev->bitmap_info.offset + super_1_allow_new_offset()
1905 if (rdev->badblocks.sector + rdev->badblocks.size > new_offset) super_1_allow_new_offset()
1932 static void sync_super(struct mddev *mddev, struct md_rdev *rdev) sync_super() argument
1935 mddev->sync_super(mddev, rdev); sync_super()
1941 super_types[mddev->major_version].sync_super(mddev, rdev); sync_super()
1946 struct md_rdev *rdev, *rdev2; match_mddev_units() local
1949 rdev_for_each_rcu(rdev, mddev1) { rdev_for_each_rcu()
1950 if (test_bit(Faulty, &rdev->flags) || rdev_for_each_rcu()
1951 test_bit(Journal, &rdev->flags) || rdev_for_each_rcu()
1952 rdev->raid_disk == -1) rdev_for_each_rcu()
1959 if (rdev->bdev->bd_contains == rdev_for_each_rcu()
1981 struct md_rdev *rdev, *reference = NULL; md_integrity_register() local
1987 rdev_for_each(rdev, mddev) { rdev_for_each()
1989 if (test_bit(Faulty, &rdev->flags)) rdev_for_each()
1991 if (rdev->raid_disk < 0) rdev_for_each()
1994 /* Use the first rdev as the reference */ rdev_for_each()
1995 reference = rdev; rdev_for_each()
1998 /* does this rdev's profile match the reference profile? */ rdev_for_each()
2000 rdev->bdev->bd_disk) < 0) rdev_for_each()
2023 * Attempt to add an rdev, but only if it is consistent with the current
2026 int md_integrity_add_rdev(struct md_rdev *rdev, struct mddev *mddev) md_integrity_add_rdev() argument
2035 bi_rdev = bdev_get_integrity(rdev->bdev); md_integrity_add_rdev()
2041 if (blk_integrity_compare(mddev->gendisk, rdev->bdev->bd_disk) != 0) { md_integrity_add_rdev()
2043 mdname(mddev), bdevname(rdev->bdev, name)); md_integrity_add_rdev()
2051 static int bind_rdev_to_array(struct md_rdev *rdev, struct mddev *mddev) bind_rdev_to_array() argument
2058 if (find_rdev(mddev, rdev->bdev->bd_dev)) bind_rdev_to_array()
2061 /* make sure rdev->sectors exceeds mddev->dev_sectors */ bind_rdev_to_array()
2062 if (rdev->sectors && (mddev->dev_sectors == 0 || bind_rdev_to_array()
2063 rdev->sectors < mddev->dev_sectors)) { bind_rdev_to_array()
2072 mddev->dev_sectors = rdev->sectors; bind_rdev_to_array()
2075 /* Verify rdev->desc_nr is unique. bind_rdev_to_array()
2080 if (rdev->desc_nr < 0) { bind_rdev_to_array()
2086 rdev->desc_nr = choice; bind_rdev_to_array()
2088 if (md_find_rdev_nr_rcu(mddev, rdev->desc_nr)) { bind_rdev_to_array()
2094 if (mddev->max_disks && rdev->desc_nr >= mddev->max_disks) { bind_rdev_to_array()
2099 bdevname(rdev->bdev,b); bind_rdev_to_array()
2102 rdev->mddev = mddev; bind_rdev_to_array()
2105 if ((err = kobject_add(&rdev->kobj, &mddev->kobj, "dev-%s", b))) bind_rdev_to_array()
2108 ko = &part_to_dev(rdev->bdev->bd_part)->kobj; bind_rdev_to_array()
2109 if (sysfs_create_link(&rdev->kobj, ko, "block")) bind_rdev_to_array()
2111 rdev->sysfs_state = sysfs_get_dirent_safe(rdev->kobj.sd, "state"); bind_rdev_to_array()
2113 list_add_rcu(&rdev->same_set, &mddev->disks); bind_rdev_to_array()
2114 bd_link_disk_holder(rdev->bdev, mddev->gendisk); bind_rdev_to_array()
2129 struct md_rdev *rdev = container_of(ws, struct md_rdev, del_work); md_delayed_delete() local
2130 kobject_del(&rdev->kobj); md_delayed_delete()
2131 kobject_put(&rdev->kobj); md_delayed_delete()
2134 static void unbind_rdev_from_array(struct md_rdev *rdev) unbind_rdev_from_array() argument
2138 bd_unlink_disk_holder(rdev->bdev, rdev->mddev->gendisk); unbind_rdev_from_array()
2139 list_del_rcu(&rdev->same_set); unbind_rdev_from_array()
2140 printk(KERN_INFO "md: unbind<%s>\n", bdevname(rdev->bdev,b)); unbind_rdev_from_array()
2141 rdev->mddev = NULL; unbind_rdev_from_array()
2142 sysfs_remove_link(&rdev->kobj, "block"); unbind_rdev_from_array()
2143 sysfs_put(rdev->sysfs_state); unbind_rdev_from_array()
2144 rdev->sysfs_state = NULL; unbind_rdev_from_array()
2145 rdev->badblocks.count = 0; unbind_rdev_from_array()
2151 INIT_WORK(&rdev->del_work, md_delayed_delete); unbind_rdev_from_array()
2152 kobject_get(&rdev->kobj); unbind_rdev_from_array()
2153 queue_work(md_misc_wq, &rdev->del_work); unbind_rdev_from_array()
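
unbind_rdev_from_array() above never tears the kobject down directly: it takes an extra reference, queues md_delayed_delete() (lines 2129-2131) on md_misc_wq, and lets the worker do kobject_del()/kobject_put(), so the final sysfs teardown runs from workqueue context rather than from whatever context requested the unbind. A stripped-down user-space analogue of that pin-then-hand-off-to-a-worker shape, using POSIX threads (all names hypothetical; build with -pthread):

#include <pthread.h>
#include <stdatomic.h>
#include <stdio.h>

struct obj {
	atomic_int refs;
};

static void obj_put(struct obj *o)
{
	if (atomic_fetch_sub(&o->refs, 1) == 1)
		printf("final put: tearing object down\n");
}

static void *delayed_delete(void *arg)   /* plays md_delayed_delete() */
{
	obj_put(arg);
	return NULL;
}

int main(void)
{
	struct obj o = { 1 };                /* object's own reference */
	pthread_t worker;

	atomic_fetch_add(&o.refs, 1);        /* kobject_get() before queueing */
	pthread_create(&worker, NULL, delayed_delete, &o);
	obj_put(&o);                         /* caller drops its reference */
	pthread_join(worker, NULL);          /* worker drops the queued one */
	return 0;
}
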
2161 static int lock_rdev(struct md_rdev *rdev, dev_t dev, int shared) lock_rdev() argument
2168 shared ? (struct md_rdev *)lock_rdev : rdev); lock_rdev()
2174 rdev->bdev = bdev; lock_rdev()
2178 static void unlock_rdev(struct md_rdev *rdev) unlock_rdev() argument
2180 struct block_device *bdev = rdev->bdev; unlock_rdev()
2181 rdev->bdev = NULL; unlock_rdev()
2187 static void export_rdev(struct md_rdev *rdev) export_rdev() argument
2192 bdevname(rdev->bdev,b)); export_rdev()
2193 md_rdev_clear(rdev); export_rdev()
2195 if (test_bit(AutoDetected, &rdev->flags)) export_rdev()
2196 md_autodetect_dev(rdev->bdev->bd_dev); export_rdev()
2198 unlock_rdev(rdev); export_rdev()
2199 kobject_put(&rdev->kobj); export_rdev()
2202 void md_kick_rdev_from_array(struct md_rdev *rdev) md_kick_rdev_from_array() argument
2204 unbind_rdev_from_array(rdev); md_kick_rdev_from_array()
2205 export_rdev(rdev); md_kick_rdev_from_array()
2211 struct md_rdev *rdev; export_array() local
2214 rdev = list_first_entry(&mddev->disks, struct md_rdev, export_array()
2216 md_kick_rdev_from_array(rdev); export_array()
2230 struct md_rdev *rdev; rdev_for_each() local
2231 rdev_for_each(rdev, mddev) { rdev_for_each()
2232 if (rdev->sb_events == mddev->events || rdev_for_each()
2234 rdev->raid_disk < 0 && rdev_for_each()
2235 rdev->sb_events+1 == mddev->events)) { rdev_for_each()
2237 rdev->sb_loaded = 2; rdev_for_each()
2239 sync_super(mddev, rdev); rdev_for_each()
2240 rdev->sb_loaded = 1; rdev_for_each()
2247 struct md_rdev *rdev; does_sb_need_changing() local
2251 /* Find a good rdev */ does_sb_need_changing()
2252 rdev_for_each(rdev, mddev) does_sb_need_changing()
2253 if ((rdev->raid_disk >= 0) && !test_bit(Faulty, &rdev->flags)) does_sb_need_changing()
2257 if (!rdev) does_sb_need_changing()
2260 sb = page_address(rdev->sb_page); does_sb_need_changing()
2262 rdev_for_each(rdev, mddev) { rdev_for_each()
2263 role = le16_to_cpu(sb->dev_roles[rdev->desc_nr]); rdev_for_each()
2265 if (role == 0xffff && rdev->raid_disk >=0 && rdev_for_each()
2266 !test_bit(Faulty, &rdev->flags)) rdev_for_each()
2269 if (test_bit(Faulty, &rdev->flags) && (role < 0xfffd)) rdev_for_each()
2286 struct md_rdev *rdev; md_update_sb() local
2312 rdev_for_each(rdev, mddev) { rdev_for_each()
2313 if (rdev->raid_disk >= 0 && rdev_for_each()
2315 !test_bit(Journal, &rdev->flags) && rdev_for_each()
2316 !test_bit(In_sync, &rdev->flags) && rdev_for_each()
2317 mddev->curr_resync_completed > rdev->recovery_offset) rdev_for_each()
2318 rdev->recovery_offset = mddev->curr_resync_completed; rdev_for_each()
2326 rdev_for_each(rdev, mddev) { rdev_for_each()
2327 if (rdev->badblocks.changed) { rdev_for_each()
2328 rdev->badblocks.changed = 0; rdev_for_each()
2329 md_ack_all_badblocks(&rdev->badblocks); rdev_for_each()
2330 md_error(mddev, rdev); rdev_for_each()
2332 clear_bit(Blocked, &rdev->flags); rdev_for_each()
2333 clear_bit(BlockedBadBlocks, &rdev->flags); rdev_for_each()
2334 wake_up(&rdev->blocked_wait); rdev_for_each()
2390 rdev_for_each(rdev, mddev) { rdev_for_each()
2391 if (rdev->badblocks.changed) rdev_for_each()
2393 if (test_bit(Faulty, &rdev->flags)) rdev_for_each()
2394 set_bit(FaultRecorded, &rdev->flags); rdev_for_each()
2404 rdev_for_each(rdev, mddev) { rdev_for_each()
2407 if (rdev->sb_loaded != 1) rdev_for_each()
2410 if (!test_bit(Faulty, &rdev->flags)) { rdev_for_each()
2411 md_super_write(mddev,rdev, rdev_for_each()
2412 rdev->sb_start, rdev->sb_size, rdev_for_each()
2413 rdev->sb_page); rdev_for_each()
2415 bdevname(rdev->bdev, b), rdev_for_each()
2416 (unsigned long long)rdev->sb_start); rdev_for_each()
2417 rdev->sb_events = mddev->events; rdev_for_each()
2418 if (rdev->badblocks.size) { rdev_for_each()
2419 md_super_write(mddev, rdev, rdev_for_each()
2420 rdev->badblocks.sector, rdev_for_each()
2421 rdev->badblocks.size << 9, rdev_for_each()
2422 rdev->bb_page); rdev_for_each()
2423 rdev->badblocks.size = 0; rdev_for_each()
2428 bdevname(rdev->bdev, b)); rdev_for_each()
2450 rdev_for_each(rdev, mddev) { rdev_for_each()
2451 if (test_and_clear_bit(FaultRecorded, &rdev->flags)) rdev_for_each()
2452 clear_bit(Blocked, &rdev->flags); rdev_for_each()
2455 md_ack_all_badblocks(&rdev->badblocks); rdev_for_each()
2456 clear_bit(BlockedBadBlocks, &rdev->flags); rdev_for_each()
2457 wake_up(&rdev->blocked_wait); rdev_for_each()
2465 static int add_bound_rdev(struct md_rdev *rdev) add_bound_rdev() argument
2467 struct mddev *mddev = rdev->mddev; add_bound_rdev()
2476 validate_super(mddev, rdev); add_bound_rdev()
2477 err = mddev->pers->hot_add_disk(mddev, rdev); add_bound_rdev()
2479 unbind_rdev_from_array(rdev); add_bound_rdev()
2480 export_rdev(rdev); add_bound_rdev()
2484 sysfs_notify_dirent_safe(rdev->sysfs_state); add_bound_rdev()
2522 state_show(struct md_rdev *rdev, char *page) state_show() argument
2526 unsigned long flags = ACCESS_ONCE(rdev->flags); state_show()
2529 rdev->badblocks.unacked_exist) { state_show()
2546 (rdev->badblocks.unacked_exist state_show()
2574 state_store(struct md_rdev *rdev, const char *buf, size_t len) state_store() argument
2590 if (cmd_match(buf, "faulty") && rdev->mddev->pers) { state_store()
2591 md_error(rdev->mddev, rdev); state_store()
2592 if (test_bit(Faulty, &rdev->flags)) state_store()
2597 if (rdev->raid_disk >= 0) state_store()
2600 struct mddev *mddev = rdev->mddev; state_store()
2603 err = md_cluster_ops->remove_disk(mddev, rdev); state_store()
2606 md_kick_rdev_from_array(rdev); state_store()
2613 set_bit(WriteMostly, &rdev->flags); state_store()
2616 clear_bit(WriteMostly, &rdev->flags); state_store()
2619 set_bit(Blocked, &rdev->flags); state_store()
2622 if (!test_bit(Faulty, &rdev->flags) && state_store()
2623 rdev->badblocks.unacked_exist) { state_store()
2627 md_error(rdev->mddev, rdev); state_store()
2629 clear_bit(Blocked, &rdev->flags); state_store()
2630 clear_bit(BlockedBadBlocks, &rdev->flags); state_store()
2631 wake_up(&rdev->blocked_wait); state_store()
2632 set_bit(MD_RECOVERY_NEEDED, &rdev->mddev->recovery); state_store()
2633 md_wakeup_thread(rdev->mddev->thread); state_store()
2636 } else if (cmd_match(buf, "insync") && rdev->raid_disk == -1) { state_store()
2637 set_bit(In_sync, &rdev->flags); state_store()
2639 } else if (cmd_match(buf, "-insync") && rdev->raid_disk >= 0 && state_store()
2640 !test_bit(Journal, &rdev->flags)) { state_store()
2641 if (rdev->mddev->pers == NULL) { state_store()
2642 clear_bit(In_sync, &rdev->flags); state_store()
2643 rdev->saved_raid_disk = rdev->raid_disk; state_store()
2644 rdev->raid_disk = -1; state_store()
2648 set_bit(WriteErrorSeen, &rdev->flags); state_store()
2651 clear_bit(WriteErrorSeen, &rdev->flags); state_store()
2658 if (rdev->raid_disk >= 0 && state_store()
2659 !test_bit(Journal, &rdev->flags) && state_store()
2660 !test_bit(Replacement, &rdev->flags)) state_store()
2661 set_bit(WantReplacement, &rdev->flags); state_store()
2662 set_bit(MD_RECOVERY_NEEDED, &rdev->mddev->recovery); state_store()
2663 md_wakeup_thread(rdev->mddev->thread); state_store()
2670 clear_bit(WantReplacement, &rdev->flags); state_store()
2676 if (rdev->mddev->pers) state_store()
2679 set_bit(Replacement, &rdev->flags); state_store()
2684 if (rdev->mddev->pers) state_store()
2687 clear_bit(Replacement, &rdev->flags); state_store()
2691 if (test_bit(Faulty, &rdev->flags) && (rdev->raid_disk == -1)) { state_store()
2698 if (!mddev_is_clustered(rdev->mddev) || state_store()
2699 (err = md_cluster_ops->gather_bitmaps(rdev)) == 0) { state_store()
2700 clear_bit(Faulty, &rdev->flags); state_store()
2701 err = add_bound_rdev(rdev); state_store()
2707 sysfs_notify_dirent_safe(rdev->sysfs_state); state_store()
2714 errors_show(struct md_rdev *rdev, char *page) errors_show() argument
2716 return sprintf(page, "%d\n", atomic_read(&rdev->corrected_errors)); errors_show()
2720 errors_store(struct md_rdev *rdev, const char *buf, size_t len) errors_store() argument
2728 atomic_set(&rdev->corrected_errors, n); errors_store()
2735 slot_show(struct md_rdev *rdev, char *page) slot_show() argument
2737 if (test_bit(Journal, &rdev->flags)) slot_show()
2739 else if (rdev->raid_disk < 0) slot_show()
2742 return sprintf(page, "%d\n", rdev->raid_disk); slot_show()
2746 slot_store(struct md_rdev *rdev, const char *buf, size_t len) slot_store() argument
2751 if (test_bit(Journal, &rdev->flags)) slot_store()
2760 if (rdev->mddev->pers && slot == -1) { slot_store()
2768 if (rdev->raid_disk == -1) slot_store()
2771 if (rdev->mddev->pers->hot_remove_disk == NULL) slot_store()
2773 clear_bit(Blocked, &rdev->flags); slot_store()
2774 remove_and_add_spares(rdev->mddev, rdev); slot_store()
2775 if (rdev->raid_disk >= 0) slot_store()
2777 set_bit(MD_RECOVERY_NEEDED, &rdev->mddev->recovery); slot_store()
2778 md_wakeup_thread(rdev->mddev->thread); slot_store()
2779 } else if (rdev->mddev->pers) { slot_store()
2785 if (rdev->raid_disk != -1) slot_store()
2788 if (test_bit(MD_RECOVERY_RUNNING, &rdev->mddev->recovery)) slot_store()
2791 if (rdev->mddev->pers->hot_add_disk == NULL) slot_store()
2794 if (slot >= rdev->mddev->raid_disks && slot_store()
2795 slot >= rdev->mddev->raid_disks + rdev->mddev->delta_disks) slot_store()
2798 rdev->raid_disk = slot; slot_store()
2799 if (test_bit(In_sync, &rdev->flags)) slot_store()
2800 rdev->saved_raid_disk = slot; slot_store()
2802 rdev->saved_raid_disk = -1; slot_store()
2803 clear_bit(In_sync, &rdev->flags); slot_store()
2804 clear_bit(Bitmap_sync, &rdev->flags); slot_store()
2805 err = rdev->mddev->pers-> slot_store()
2806 hot_add_disk(rdev->mddev, rdev); slot_store()
2808 rdev->raid_disk = -1; slot_store()
2811 sysfs_notify_dirent_safe(rdev->sysfs_state); slot_store()
2812 if (sysfs_link_rdev(rdev->mddev, rdev)) slot_store()
2816 if (slot >= rdev->mddev->raid_disks && slot_store()
2817 slot >= rdev->mddev->raid_disks + rdev->mddev->delta_disks) slot_store()
2819 rdev->raid_disk = slot; slot_store()
2821 clear_bit(Faulty, &rdev->flags); slot_store()
2822 clear_bit(WriteMostly, &rdev->flags); slot_store()
2823 set_bit(In_sync, &rdev->flags); slot_store()
2824 sysfs_notify_dirent_safe(rdev->sysfs_state); slot_store()
2833 offset_show(struct md_rdev *rdev, char *page) offset_show() argument
2835 return sprintf(page, "%llu\n", (unsigned long long)rdev->data_offset); offset_show()
2839 offset_store(struct md_rdev *rdev, const char *buf, size_t len) offset_store() argument
2844 if (rdev->mddev->pers && rdev->raid_disk >= 0) offset_store()
2846 if (rdev->sectors && rdev->mddev->external) offset_store()
2850 rdev->data_offset = offset; offset_store()
2851 rdev->new_data_offset = offset; offset_store()
2858 static ssize_t new_offset_show(struct md_rdev *rdev, char *page) new_offset_show() argument
2861 (unsigned long long)rdev->new_data_offset); new_offset_show()
2864 static ssize_t new_offset_store(struct md_rdev *rdev, new_offset_store() argument
2868 struct mddev *mddev = rdev->mddev; new_offset_store()
2876 if (new_offset == rdev->data_offset) new_offset_store()
2879 else if (new_offset > rdev->data_offset) { new_offset_store()
2881 if (new_offset - rdev->data_offset new_offset_store()
2882 + mddev->dev_sectors > rdev->sectors) new_offset_store()
2890 if (new_offset < rdev->data_offset && new_offset_store()
2897 if (new_offset > rdev->data_offset && new_offset_store()
2903 .allow_new_offset(rdev, new_offset)) new_offset_store()
2905 rdev->new_data_offset = new_offset; new_offset_store()
2906 if (new_offset > rdev->data_offset) new_offset_store()
2908 else if (new_offset < rdev->data_offset) new_offset_store()
2917 rdev_size_show(struct md_rdev *rdev, char *page) rdev_size_show() argument
2919 return sprintf(page, "%llu\n", (unsigned long long)rdev->sectors / 2); rdev_size_show()
2952 rdev_size_store(struct md_rdev *rdev, const char *buf, size_t len) rdev_size_store() argument
2954 struct mddev *my_mddev = rdev->mddev; rdev_size_store()
2955 sector_t oldsectors = rdev->sectors; rdev_size_store()
2958 if (test_bit(Journal, &rdev->flags)) rdev_size_store()
2962 if (rdev->data_offset != rdev->new_data_offset) rdev_size_store()
2964 if (my_mddev->pers && rdev->raid_disk >= 0) { rdev_size_store()
2967 rdev_size_change(rdev, sectors); rdev_size_store()
2971 sectors = (i_size_read(rdev->bdev->bd_inode) >> 9) - rdev_size_store()
2972 rdev->data_offset; rdev_size_store()
2980 rdev->sectors = sectors; rdev_size_store()
2984 * the rdev lists safely. rdev_size_store()
2997 if (rdev->bdev == rdev2->bdev && rdev_for_each()
2998 rdev != rdev2 && rdev_for_each()
2999 overlaps(rdev->data_offset, rdev->sectors, rdev_for_each()
3018 rdev->sectors = oldsectors;
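
rdev_size_store() above refuses to grow a member if its data range would collide with another rdev sharing the same block device, rolling sectors back to oldsectors when it does. The overlaps() predicate it uses is the standard start/length interval-intersection test; a sketch (the kernel's exact signature is assumed):

#include <stdbool.h>
#include <stdio.h>

typedef unsigned long long sector_t;

/* true iff [s1, s1+l1) intersects [s2, s2+l2) */
static bool overlaps(sector_t s1, sector_t l1, sector_t s2, sector_t l2)
{
	return s1 < s2 + l2 && s2 < s1 + l1;
}

int main(void)
{
	printf("%d\n", overlaps(0, 100, 50, 10));   /* 1: second range inside first */
	printf("%d\n", overlaps(0, 100, 100, 10));  /* 0: merely adjacent */
	return 0;
}
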
3028 static ssize_t recovery_start_show(struct md_rdev *rdev, char *page) recovery_start_show() argument
3030 unsigned long long recovery_start = rdev->recovery_offset; recovery_start_show()
3032 if (test_bit(In_sync, &rdev->flags) || recovery_start_show()
3039 static ssize_t recovery_start_store(struct md_rdev *rdev, const char *buf, size_t len) recovery_start_store() argument
3048 if (rdev->mddev->pers && recovery_start_store()
3049 rdev->raid_disk >= 0) recovery_start_store()
3052 rdev->recovery_offset = recovery_start; recovery_start_store()
3054 set_bit(In_sync, &rdev->flags); recovery_start_store()
3056 clear_bit(In_sync, &rdev->flags); recovery_start_store()
3068 static ssize_t bb_show(struct md_rdev *rdev, char *page) bb_show() argument
3070 return badblocks_show(&rdev->badblocks, page, 0); bb_show()
3072 static ssize_t bb_store(struct md_rdev *rdev, const char *page, size_t len) bb_store() argument
3074 int rv = badblocks_store(&rdev->badblocks, page, len, 0); bb_store()
3076 if (test_and_clear_bit(BlockedBadBlocks, &rdev->flags)) bb_store()
3077 wake_up(&rdev->blocked_wait); bb_store()
3083 static ssize_t ubb_show(struct md_rdev *rdev, char *page) ubb_show() argument
3085 return badblocks_show(&rdev->badblocks, page, 1); ubb_show()
3087 static ssize_t ubb_store(struct md_rdev *rdev, const char *page, size_t len) ubb_store() argument
3089 return badblocks_store(&rdev->badblocks, page, len, 1); ubb_store()
3110 struct md_rdev *rdev = container_of(kobj, struct md_rdev, kobj); rdev_attr_show() local
3114 if (!rdev->mddev) rdev_attr_show()
3116 return entry->show(rdev, page); rdev_attr_show()
3124 struct md_rdev *rdev = container_of(kobj, struct md_rdev, kobj); rdev_attr_store() local
3126 struct mddev *mddev = rdev->mddev; rdev_attr_store()
3134 if (rdev->mddev == NULL) rdev_attr_store()
3137 rv = entry->store(rdev, page, length); rdev_attr_store()
3145 struct md_rdev *rdev = container_of(ko, struct md_rdev, kobj); rdev_free() local
3146 kfree(rdev); rdev_free()
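
rdev_attr_show(), rdev_attr_store() and rdev_free() above all recover the owning md_rdev from an embedded kobject with container_of(). That macro is plain pointer arithmetic over offsetof; the same trick stand-alone:

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct kobj { const char *name; };

struct rdev_like {
	int desc_nr;
	struct kobj kobj;                    /* embedded, as in struct md_rdev */
};

int main(void)
{
	struct rdev_like r = { .desc_nr = 3, .kobj = { "dev-sda" } };
	struct kobj *kp = &r.kobj;           /* all sysfs hands back is this */

	struct rdev_like *back = container_of(kp, struct rdev_like, kobj);
	printf("desc_nr=%d name=%s\n", back->desc_nr, back->kobj.name);
	return 0;
}
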
3158 int md_rdev_init(struct md_rdev *rdev) md_rdev_init() argument
3160 rdev->desc_nr = -1; md_rdev_init()
3161 rdev->saved_raid_disk = -1; md_rdev_init()
3162 rdev->raid_disk = -1; md_rdev_init()
3163 rdev->flags = 0; md_rdev_init()
3164 rdev->data_offset = 0; md_rdev_init()
3165 rdev->new_data_offset = 0; md_rdev_init()
3166 rdev->sb_events = 0; md_rdev_init()
3167 rdev->last_read_error.tv_sec = 0; md_rdev_init()
3168 rdev->last_read_error.tv_nsec = 0; md_rdev_init()
3169 rdev->sb_loaded = 0; md_rdev_init()
3170 rdev->bb_page = NULL; md_rdev_init()
3171 atomic_set(&rdev->nr_pending, 0); md_rdev_init()
3172 atomic_set(&rdev->read_errors, 0); md_rdev_init()
3173 atomic_set(&rdev->corrected_errors, 0); md_rdev_init()
3175 INIT_LIST_HEAD(&rdev->same_set); md_rdev_init()
3176 init_waitqueue_head(&rdev->blocked_wait); md_rdev_init()
3182 rdev->badblocks.count = 0; md_rdev_init()
3183 rdev->badblocks.shift = -1; /* disabled until explicitly enabled */ md_rdev_init()
3184 rdev->badblocks.page = kmalloc(PAGE_SIZE, GFP_KERNEL); md_rdev_init()
3185 seqlock_init(&rdev->badblocks.lock); md_rdev_init()
3186 if (rdev->badblocks.page == NULL) md_rdev_init()
3200 * a faulty rdev _never_ has rdev->sb set.
3206 struct md_rdev *rdev; md_import_device() local
3209 rdev = kzalloc(sizeof(*rdev), GFP_KERNEL); md_import_device()
3210 if (!rdev) { md_import_device()
3215 err = md_rdev_init(rdev); md_import_device()
3218 err = alloc_disk_sb(rdev); md_import_device()
3222 err = lock_rdev(rdev, newdev, super_format == -2); md_import_device()
3226 kobject_init(&rdev->kobj, &rdev_ktype); md_import_device()
3228 size = i_size_read(rdev->bdev->bd_inode) >> BLOCK_SIZE_BITS; md_import_device()
3232 bdevname(rdev->bdev,b)); md_import_device()
3239 load_super(rdev, NULL, super_minor); md_import_device()
3244 bdevname(rdev->bdev,b), md_import_device()
3251 bdevname(rdev->bdev,b)); md_import_device()
3256 return rdev; md_import_device()
3259 if (rdev->bdev) md_import_device()
3260 unlock_rdev(rdev); md_import_device()
3261 md_rdev_clear(rdev); md_import_device()
3262 kfree(rdev); md_import_device()
3273 struct md_rdev *rdev, *freshest, *tmp; analyze_sbs() local
3277 rdev_for_each_safe(rdev, tmp, mddev) rdev_for_each_safe()
3279 load_super(rdev, freshest, mddev->minor_version)) { rdev_for_each_safe()
3281 freshest = rdev; rdev_for_each_safe()
3289 bdevname(rdev->bdev,b)); rdev_for_each_safe()
3290 md_kick_rdev_from_array(rdev); rdev_for_each_safe()
3297 rdev_for_each_safe(rdev, tmp, mddev) { rdev_for_each_safe()
3299 (rdev->desc_nr >= mddev->max_disks || rdev_for_each_safe()
3303 mdname(mddev), bdevname(rdev->bdev, b), rdev_for_each_safe()
3305 md_kick_rdev_from_array(rdev); rdev_for_each_safe()
3308 if (rdev != freshest) { rdev_for_each_safe()
3310 validate_super(mddev, rdev)) { rdev_for_each_safe()
3313 bdevname(rdev->bdev,b)); rdev_for_each_safe()
3314 md_kick_rdev_from_array(rdev); rdev_for_each_safe()
3319 rdev->desc_nr = i++; rdev_for_each_safe()
3320 rdev->raid_disk = rdev->desc_nr; rdev_for_each_safe()
3321 set_bit(In_sync, &rdev->flags); rdev_for_each_safe()
3322 } else if (rdev->raid_disk >= rdev_for_each_safe()
3324 !test_bit(Journal, &rdev->flags)) { rdev_for_each_safe()
3325 rdev->raid_disk = -1; rdev_for_each_safe()
3326 clear_bit(In_sync, &rdev->flags); rdev_for_each_safe()
3434 struct md_rdev *rdev; level_store() local
3510 rdev_for_each(rdev, mddev) level_store()
3511 rdev->new_raid_disk = rdev->raid_disk; level_store()
3581 rdev_for_each(rdev, mddev) { rdev_for_each()
3582 if (rdev->raid_disk < 0) rdev_for_each()
3584 if (rdev->new_raid_disk >= mddev->raid_disks) rdev_for_each()
3585 rdev->new_raid_disk = -1; rdev_for_each()
3586 if (rdev->new_raid_disk == rdev->raid_disk) rdev_for_each()
3588 sysfs_unlink_rdev(mddev, rdev); rdev_for_each()
3590 rdev_for_each(rdev, mddev) { rdev_for_each()
3591 if (rdev->raid_disk < 0) rdev_for_each()
3593 if (rdev->new_raid_disk == rdev->raid_disk) rdev_for_each()
3595 rdev->raid_disk = rdev->new_raid_disk; rdev_for_each()
3596 if (rdev->raid_disk < 0) rdev_for_each()
3597 clear_bit(In_sync, &rdev->flags); rdev_for_each()
3599 if (sysfs_link_rdev(mddev, rdev)) rdev_for_each()
3602 rdev->raid_disk, mdname(mddev)); rdev_for_each()
3706 struct md_rdev *rdev; raid_disks_store() local
3710 rdev_for_each(rdev, mddev) { rdev_for_each()
3712 rdev->data_offset < rdev->new_data_offset) rdev_for_each()
3715 rdev->data_offset > rdev->new_data_offset) rdev_for_each()
4079 struct md_rdev *rdev; new_dev_store() local
4098 rdev = md_import_device(dev, mddev->major_version, new_dev_store()
4100 if (!IS_ERR(rdev) && !list_empty(&mddev->disks)) { new_dev_store()
4105 .load_super(rdev, rdev0, mddev->minor_version); new_dev_store()
4110 rdev = md_import_device(dev, -2, -1); new_dev_store()
4112 rdev = md_import_device(dev, -1, -1); new_dev_store()
4114 if (IS_ERR(rdev)) { new_dev_store()
4116 return PTR_ERR(rdev); new_dev_store()
4118 err = bind_rdev_to_array(rdev, mddev); new_dev_store()
4121 export_rdev(rdev); new_dev_store()
4725 struct md_rdev *rdev; reshape_position_store() local
4746 rdev_for_each(rdev, mddev) reshape_position_store()
4747 rdev->new_data_offset = rdev->data_offset; reshape_position_store()
5121 struct md_rdev *rdev; md_run() local
5153 rdev_for_each(rdev, mddev) { rdev_for_each()
5154 if (test_bit(Faulty, &rdev->flags)) rdev_for_each()
5156 sync_blockdev(rdev->bdev); rdev_for_each()
5157 invalidate_bdev(rdev->bdev); rdev_for_each()
5163 if (rdev->meta_bdev) { rdev_for_each()
5165 } else if (rdev->data_offset < rdev->sb_start) { rdev_for_each()
5167 rdev->data_offset + mddev->dev_sectors rdev_for_each()
5168 > rdev->sb_start) { rdev_for_each()
5174 if (rdev->sb_start + rdev->sb_size/512 rdev_for_each()
5175 > rdev->data_offset) { rdev_for_each()
5181 sysfs_notify_dirent_safe(rdev->sysfs_state); rdev_for_each()
5221 rdev_for_each(rdev, mddev) rdev_for_each()
5223 if (rdev < rdev2 && rdev_for_each()
5224 rdev->bdev->bd_contains == rdev_for_each()
5231 bdevname(rdev->bdev,b), rdev_for_each()
5314 rdev_for_each(rdev, mddev)
5315 if (rdev->raid_disk >= 0)
5316 if (sysfs_link_rdev(mddev, rdev))
5376 struct md_rdev *rdev; restart_array() local
5380 rdev_for_each_rcu(rdev, mddev) { rdev_for_each_rcu()
5381 if (test_bit(Journal, &rdev->flags) && rdev_for_each_rcu()
5382 !test_bit(Faulty, &rdev->flags)) { rdev_for_each_rcu()
5603 struct md_rdev *rdev; do_md_stop() local
5650 rdev_for_each(rdev, mddev) do_md_stop()
5651 if (rdev->raid_disk >= 0) do_md_stop()
5652 sysfs_unlink_rdev(mddev, rdev); do_md_stop()
5694 struct md_rdev *rdev; autorun_array() local
5702 rdev_for_each(rdev, mddev) { rdev_for_each()
5704 printk("<%s>", bdevname(rdev->bdev,b)); rdev_for_each()
5729 struct md_rdev *rdev0, *rdev, *tmp; autorun_devices() local
5744 rdev_for_each_list(rdev, tmp, &pending_raid_disks) autorun_devices()
5745 if (super_90_load(rdev, rdev0, 0) >= 0) { autorun_devices()
5747 bdevname(rdev->bdev,b)); autorun_devices()
5748 list_move(&rdev->same_set, &candidates); autorun_devices()
5790 rdev_for_each_list(rdev, tmp, &candidates) { autorun_devices()
5791 list_del_init(&rdev->same_set); autorun_devices()
5792 if (bind_rdev_to_array(rdev, mddev)) autorun_devices()
5793 export_rdev(rdev); autorun_devices()
5801 rdev_for_each_list(rdev, tmp, &candidates) { autorun_devices()
5802 list_del_init(&rdev->same_set); autorun_devices()
5803 export_rdev(rdev); autorun_devices()
5829 struct md_rdev *rdev; get_array_info() local
5833 rdev_for_each_rcu(rdev, mddev) { rdev_for_each_rcu()
5835 if (test_bit(Faulty, &rdev->flags)) rdev_for_each_rcu()
5839 if (test_bit(In_sync, &rdev->flags)) rdev_for_each_rcu()
5917 struct md_rdev *rdev; get_disk_info() local
5923 rdev = md_find_rdev_nr_rcu(mddev, info.number); get_disk_info()
5924 if (rdev) { get_disk_info()
5925 info.major = MAJOR(rdev->bdev->bd_dev); get_disk_info()
5926 info.minor = MINOR(rdev->bdev->bd_dev); get_disk_info()
5927 info.raid_disk = rdev->raid_disk; get_disk_info()
5929 if (test_bit(Faulty, &rdev->flags)) get_disk_info()
5931 else if (test_bit(In_sync, &rdev->flags)) { get_disk_info()
5935 if (test_bit(Journal, &rdev->flags)) get_disk_info()
5937 if (test_bit(WriteMostly, &rdev->flags)) get_disk_info()
5955 struct md_rdev *rdev; add_new_disk() local
5971 rdev = md_import_device(dev, mddev->major_version, mddev->minor_version); add_new_disk()
5972 if (IS_ERR(rdev)) { add_new_disk()
5975 PTR_ERR(rdev)); add_new_disk()
5976 return PTR_ERR(rdev); add_new_disk()
5983 .load_super(rdev, rdev0, mddev->minor_version); add_new_disk()
5987 bdevname(rdev->bdev,b), add_new_disk()
5989 export_rdev(rdev); add_new_disk()
5993 err = bind_rdev_to_array(rdev, mddev); add_new_disk()
5995 export_rdev(rdev); add_new_disk()
6013 rdev = md_import_device(dev, mddev->major_version, add_new_disk()
6016 rdev = md_import_device(dev, -1, -1); add_new_disk()
6017 if (IS_ERR(rdev)) { add_new_disk()
6020 PTR_ERR(rdev)); add_new_disk()
6021 return PTR_ERR(rdev); add_new_disk()
6027 rdev->raid_disk = info->raid_disk; add_new_disk()
6028 set_bit(In_sync, &rdev->flags); add_new_disk()
6029 clear_bit(Bitmap_sync, &rdev->flags); add_new_disk()
6031 rdev->raid_disk = -1; add_new_disk()
6032 rdev->saved_raid_disk = rdev->raid_disk; add_new_disk()
6035 validate_super(mddev, rdev); add_new_disk()
6037 rdev->raid_disk != info->raid_disk) { add_new_disk()
6041 export_rdev(rdev); add_new_disk()
6045 clear_bit(In_sync, &rdev->flags); /* just to be sure */ add_new_disk()
6047 set_bit(WriteMostly, &rdev->flags); add_new_disk()
6049 clear_bit(WriteMostly, &rdev->flags); add_new_disk()
6052 set_bit(Journal, &rdev->flags); add_new_disk()
6058 set_bit(Candidate, &rdev->flags); add_new_disk()
6061 err = md_cluster_ops->add_new_disk(mddev, rdev); add_new_disk()
6063 export_rdev(rdev); add_new_disk()
6069 rdev->raid_disk = -1; add_new_disk()
6070 err = bind_rdev_to_array(rdev, mddev); add_new_disk()
6073 export_rdev(rdev); add_new_disk()
6082 err = add_bound_rdev(rdev); add_new_disk()
6086 err = add_bound_rdev(rdev); add_new_disk()
6102 rdev = md_import_device(dev, -1, 0); add_new_disk()
6103 if (IS_ERR(rdev)) { add_new_disk()
6106 PTR_ERR(rdev)); add_new_disk()
6107 return PTR_ERR(rdev); add_new_disk()
6109 rdev->desc_nr = info->number; add_new_disk()
6111 rdev->raid_disk = info->raid_disk; add_new_disk()
6113 rdev->raid_disk = -1; add_new_disk()
6115 if (rdev->raid_disk < mddev->raid_disks) add_new_disk()
6117 set_bit(In_sync, &rdev->flags); add_new_disk()
6120 set_bit(WriteMostly, &rdev->flags); add_new_disk()
6124 rdev->sb_start = i_size_read(rdev->bdev->bd_inode) / 512; add_new_disk()
6126 rdev->sb_start = calc_dev_sboffset(rdev); add_new_disk()
6127 rdev->sectors = rdev->sb_start; add_new_disk()
6129 err = bind_rdev_to_array(rdev, mddev); add_new_disk()
6131 export_rdev(rdev); add_new_disk()
6142 struct md_rdev *rdev; hot_remove_disk() local
6145 rdev = find_rdev(mddev, dev); hot_remove_disk()
6146 if (!rdev) hot_remove_disk()
6152 if (rdev->raid_disk < 0) hot_remove_disk()
6155 clear_bit(Blocked, &rdev->flags); hot_remove_disk()
6156 remove_and_add_spares(mddev, rdev); hot_remove_disk()
6158 if (rdev->raid_disk >= 0) hot_remove_disk()
6163 md_cluster_ops->remove_disk(mddev, rdev); hot_remove_disk()
6165 md_kick_rdev_from_array(rdev); hot_remove_disk()
6175 bdevname(rdev->bdev,b), mdname(mddev)); hot_remove_disk()
6183 struct md_rdev *rdev; hot_add_disk() local
6201 rdev = md_import_device(dev, -1, 0); hot_add_disk()
6202 if (IS_ERR(rdev)) { hot_add_disk()
6205 PTR_ERR(rdev)); hot_add_disk()
6210 rdev->sb_start = calc_dev_sboffset(rdev); hot_add_disk()
6212 rdev->sb_start = i_size_read(rdev->bdev->bd_inode) / 512; hot_add_disk()
6214 rdev->sectors = rdev->sb_start; hot_add_disk()
6216 if (test_bit(Faulty, &rdev->flags)) { hot_add_disk()
6219 bdevname(rdev->bdev,b), mdname(mddev)); hot_add_disk()
6224 clear_bit(In_sync, &rdev->flags); hot_add_disk()
6225 rdev->desc_nr = -1; hot_add_disk()
6226 rdev->saved_raid_disk = -1; hot_add_disk()
6227 err = bind_rdev_to_array(rdev, mddev); hot_add_disk()
6236 rdev->raid_disk = -1; hot_add_disk()
6249 export_rdev(rdev); hot_add_disk()
6431 struct md_rdev *rdev; update_size() local
6452 rdev_for_each(rdev, mddev) { rdev_for_each()
6453 sector_t avail = rdev->sectors; rdev_for_each()
6469 struct md_rdev *rdev; update_raid_disks() local
6483 rdev_for_each(rdev, mddev) { rdev_for_each()
6485 rdev->data_offset < rdev->new_data_offset) rdev_for_each()
6488 rdev->data_offset > rdev->new_data_offset) rdev_for_each()
6629 struct md_rdev *rdev; set_disk_faulty() local
6636 rdev = find_rdev_rcu(mddev, dev); set_disk_faulty()
6637 if (!rdev) set_disk_faulty()
6640 md_error(mddev, rdev); set_disk_faulty()
6641 if (!test_bit(Faulty, &rdev->flags)) set_disk_faulty()
7177 void md_error(struct mddev *mddev, struct md_rdev *rdev) md_error() argument
7179 if (!rdev || test_bit(Faulty, &rdev->flags)) md_error()
7184 mddev->pers->error_handler(mddev,rdev); md_error()
7187 sysfs_notify_dirent_safe(rdev->sysfs_state); md_error()
7202 struct md_rdev *rdev; status_unused() local
7206 list_for_each_entry(rdev, &pending_raid_disks, same_set) { status_unused()
7210 bdevname(rdev->bdev,b)); status_unused()
7385 struct md_rdev *rdev; md_seq_show() local
7418 rdev_for_each_rcu(rdev, mddev) { rdev_for_each_rcu()
7421 bdevname(rdev->bdev,b), rdev->desc_nr); rdev_for_each_rcu()
7422 if (test_bit(WriteMostly, &rdev->flags)) rdev_for_each_rcu()
7424 if (test_bit(Journal, &rdev->flags)) rdev_for_each_rcu()
7426 if (test_bit(Faulty, &rdev->flags)) { rdev_for_each_rcu()
7430 if (rdev->raid_disk < 0) rdev_for_each_rcu()
7432 if (test_bit(Replacement, &rdev->flags)) rdev_for_each_rcu()
7434 sectors += rdev->sectors; rdev_for_each_rcu()
7603 struct md_rdev *rdev; is_mddev_idle() local
7609 rdev_for_each_rcu(rdev, mddev) { rdev_for_each_rcu()
7610 struct gendisk *disk = rdev->bdev->bd_contains->bd_disk; rdev_for_each_rcu()
7636 if (init || curr_events - rdev->last_events > 64) { rdev_for_each_rcu()
7637 rdev->last_events = curr_events; rdev_for_each_rcu()
7767 struct md_rdev *rdev; md_do_sync() local
7878 rdev_for_each_rcu(rdev, mddev)
7879 if (rdev->raid_disk >= 0 &&
7880 !test_bit(Journal, &rdev->flags) &&
7881 !test_bit(Faulty, &rdev->flags) &&
7882 !test_bit(In_sync, &rdev->flags) &&
7883 rdev->recovery_offset < j)
7884 j = rdev->recovery_offset;
8103 rdev_for_each_rcu(rdev, mddev)
8104 if (rdev->raid_disk >= 0 &&
8106 !test_bit(Journal, &rdev->flags) &&
8107 !test_bit(Faulty, &rdev->flags) &&
8108 !test_bit(In_sync, &rdev->flags) &&
8109 rdev->recovery_offset < mddev->curr_resync)
8110 rdev->recovery_offset = mddev->curr_resync;
8143 struct md_rdev *rdev; remove_and_add_spares() local
8147 rdev_for_each(rdev, mddev) rdev_for_each()
8148 if ((this == NULL || rdev == this) && rdev_for_each()
8149 rdev->raid_disk >= 0 && rdev_for_each()
8150 !test_bit(Blocked, &rdev->flags) && rdev_for_each()
8151 (test_bit(Faulty, &rdev->flags) || rdev_for_each()
8152 (!test_bit(In_sync, &rdev->flags) && rdev_for_each()
8153 !test_bit(Journal, &rdev->flags))) && rdev_for_each()
8154 atomic_read(&rdev->nr_pending)==0) { rdev_for_each()
8156 mddev, rdev) == 0) { rdev_for_each()
8157 sysfs_unlink_rdev(mddev, rdev); rdev_for_each()
8158 rdev->raid_disk = -1; rdev_for_each()
8168 rdev_for_each(rdev, mddev) { rdev_for_each()
8169 if (this && this != rdev) rdev_for_each()
8171 if (test_bit(Candidate, &rdev->flags)) rdev_for_each()
8173 if (rdev->raid_disk >= 0 && rdev_for_each()
8174 !test_bit(In_sync, &rdev->flags) && rdev_for_each()
8175 !test_bit(Journal, &rdev->flags) && rdev_for_each()
8176 !test_bit(Faulty, &rdev->flags)) rdev_for_each()
8178 if (rdev->raid_disk >= 0) rdev_for_each()
8180 if (test_bit(Faulty, &rdev->flags)) rdev_for_each()
8182 if (test_bit(Journal, &rdev->flags)) rdev_for_each()
8185 ! (rdev->saved_raid_disk >= 0 && rdev_for_each()
8186 !test_bit(Bitmap_sync, &rdev->flags))) rdev_for_each()
8189 rdev->recovery_offset = 0; rdev_for_each()
8191 hot_add_disk(mddev, rdev) == 0) { rdev_for_each()
8192 if (sysfs_link_rdev(mddev, rdev)) rdev_for_each()
8299 struct md_rdev *rdev; md_check_recovery() local
8306 rdev_for_each(rdev, mddev) md_check_recovery()
8307 clear_bit(Blocked, &rdev->flags); md_check_recovery()
8430 struct md_rdev *rdev; md_reap_sync_thread() local
8452 rdev_for_each(rdev, mddev) md_reap_sync_thread()
8453 rdev->saved_raid_disk = -1; md_reap_sync_thread()
8472 void md_wait_for_blocked_rdev(struct md_rdev *rdev, struct mddev *mddev) md_wait_for_blocked_rdev() argument
8474 sysfs_notify_dirent_safe(rdev->sysfs_state); md_wait_for_blocked_rdev()
8475 wait_event_timeout(rdev->blocked_wait, md_wait_for_blocked_rdev()
8476 !test_bit(Blocked, &rdev->flags) && md_wait_for_blocked_rdev()
8477 !test_bit(BlockedBadBlocks, &rdev->flags), md_wait_for_blocked_rdev()
8479 rdev_dec_pending(rdev, mddev); md_wait_for_blocked_rdev()
8486 struct md_rdev *rdev; md_finish_reshape() local
8488 rdev_for_each(rdev, mddev) { rdev_for_each()
8489 if (rdev->data_offset > rdev->new_data_offset) rdev_for_each()
8490 rdev->sectors += rdev->data_offset - rdev->new_data_offset; rdev_for_each()
8492 rdev->sectors -= rdev->new_data_offset - rdev->data_offset; rdev_for_each()
8493 rdev->data_offset = rdev->new_data_offset; rdev_for_each()
8746 int rdev_set_badblocks(struct md_rdev *rdev, sector_t s, int sectors, rdev_set_badblocks() argument
8751 s += rdev->new_data_offset; rdev_set_badblocks()
8753 s += rdev->data_offset; rdev_set_badblocks()
8754 rv = md_set_badblocks(&rdev->badblocks, rdev_set_badblocks()
8758 sysfs_notify_dirent_safe(rdev->sysfs_state); rdev_set_badblocks()
8759 set_bit(MD_CHANGE_CLEAN, &rdev->mddev->flags); rdev_set_badblocks()
8760 set_bit(MD_CHANGE_PENDING, &rdev->mddev->flags); rdev_set_badblocks()
8761 md_wakeup_thread(rdev->mddev->thread); rdev_set_badblocks()
8862 int rdev_clear_badblocks(struct md_rdev *rdev, sector_t s, int sectors, rdev_clear_badblocks() argument
8866 s += rdev->new_data_offset; rdev_clear_badblocks()
8868 s += rdev->data_offset; rdev_clear_badblocks()
8869 return md_clear_badblocks(&rdev->badblocks, rdev_clear_badblocks()
9078 static void check_sb_changes(struct mddev *mddev, struct md_rdev *rdev) check_sb_changes() argument
9080 struct mdp_superblock_1 *sb = page_address(rdev->sb_page); check_sb_changes()
9131 static int read_rdev(struct mddev *mddev, struct md_rdev *rdev) read_rdev() argument
9134 struct page *swapout = rdev->sb_page; read_rdev()
9137 /* Store the sb page of the rdev in the swapout temporary read_rdev()
9140 rdev->sb_page = NULL; read_rdev()
9141 alloc_disk_sb(rdev); read_rdev()
9142 ClearPageUptodate(rdev->sb_page); read_rdev()
9143 rdev->sb_loaded = 0; read_rdev()
9144 err = super_types[mddev->major_version].load_super(rdev, NULL, mddev->minor_version); read_rdev()
9147 pr_warn("%s: %d Could not reload rdev(%d) err: %d. Restoring old values\n", read_rdev()
9148 __func__, __LINE__, rdev->desc_nr, err); read_rdev()
9149 put_page(rdev->sb_page); read_rdev()
9150 rdev->sb_page = swapout; read_rdev()
9151 rdev->sb_loaded = 1; read_rdev()
9155 sb = page_address(rdev->sb_page); read_rdev()
9161 rdev->recovery_offset = le64_to_cpu(sb->recovery_offset); read_rdev()
9166 if (rdev->recovery_offset == MaxSector && read_rdev()
9167 !test_bit(In_sync, &rdev->flags) && read_rdev()
9177 struct md_rdev *rdev; md_reload_sb() local
9180 /* Find the rdev */ rdev_for_each_rcu()
9181 rdev_for_each_rcu(rdev, mddev) { rdev_for_each_rcu()
9182 if (rdev->desc_nr == nr) rdev_for_each_rcu()
9186 if (!rdev || rdev->desc_nr != nr) {
9187 pr_warn("%s: %d Could not find rdev with nr %d\n", __func__, __LINE__, nr);
9191 err = read_rdev(mddev, rdev);
9195 check_sb_changes(mddev, rdev);
9197 /* Read all rdev's to update recovery_offset */
9198 rdev_for_each_rcu(rdev, mddev)
9199 read_rdev(mddev, rdev);
9232 struct md_rdev *rdev; autostart_arrays() local
9249 rdev = md_import_device(dev,0, 90); autostart_arrays()
9250 if (IS_ERR(rdev)) autostart_arrays()
9253 if (test_bit(Faulty, &rdev->flags)) autostart_arrays()
9256 set_bit(AutoDetected, &rdev->flags); autostart_arrays()
9257 list_add(&rdev->same_set, &pending_raid_disks); autostart_arrays()
H A Dmultipath.c45 struct md_rdev *rdev = rcu_dereference(conf->multipaths[i].rdev); multipath_map() local
46 if (rdev && test_bit(In_sync, &rdev->flags)) { multipath_map()
47 atomic_inc(&rdev->nr_pending); multipath_map()
89 struct md_rdev *rdev = conf->multipaths[mp_bh->path].rdev; multipath_end_request() local
98 md_error (mp_bh->mddev, rdev); multipath_end_request()
100 bdevname(rdev->bdev,b), multipath_end_request()
105 rdev_dec_pending(rdev, conf->mddev); multipath_end_request()
135 mp_bh->bio.bi_iter.bi_sector += multipath->rdev->data_offset; multipath_make_request()
136 mp_bh->bio.bi_bdev = multipath->rdev->bdev; multipath_make_request()
153 conf->multipaths[i].rdev && multipath_status()
154 test_bit(In_sync, &conf->multipaths[i].rdev->flags) ? "U" : "_"); multipath_status()
165 struct md_rdev *rdev = rcu_dereference(conf->multipaths[i].rdev); multipath_congested() local
166 if (rdev && !test_bit(Faulty, &rdev->flags)) { multipath_congested()
167 struct request_queue *q = bdev_get_queue(rdev->bdev); multipath_congested()
183 static void multipath_error (struct mddev *mddev, struct md_rdev *rdev) multipath_error() argument
202 if (test_and_clear_bit(In_sync, &rdev->flags)) { multipath_error()
208 set_bit(Faulty, &rdev->flags); multipath_error()
214 bdevname(rdev->bdev, b), multipath_error()
234 if (tmp->rdev) print_multipath_conf()
236 i,!test_bit(Faulty, &tmp->rdev->flags), print_multipath_conf()
237 bdevname(tmp->rdev->bdev,b)); print_multipath_conf()
241 static int multipath_add_disk(struct mddev *mddev, struct md_rdev *rdev) multipath_add_disk() argument
251 if (rdev->raid_disk >= 0) multipath_add_disk()
252 first = last = rdev->raid_disk; multipath_add_disk()
257 if ((p=conf->multipaths+path)->rdev == NULL) { multipath_add_disk()
258 q = rdev->bdev->bd_disk->queue; multipath_add_disk()
259 disk_stack_limits(mddev->gendisk, rdev->bdev, multipath_add_disk()
260 rdev->data_offset << 9); multipath_add_disk()
262 err = md_integrity_add_rdev(rdev, mddev); multipath_add_disk()
267 rdev->raid_disk = path; multipath_add_disk()
268 set_bit(In_sync, &rdev->flags); multipath_add_disk()
270 rcu_assign_pointer(p->rdev, rdev); multipath_add_disk()
280 static int multipath_remove_disk(struct mddev *mddev, struct md_rdev *rdev) multipath_remove_disk() argument
284 int number = rdev->raid_disk; multipath_remove_disk()
289 if (rdev == p->rdev) { multipath_remove_disk()
290 if (test_bit(In_sync, &rdev->flags) || multipath_remove_disk()
291 atomic_read(&rdev->nr_pending)) { multipath_remove_disk()
297 p->rdev = NULL; multipath_remove_disk()
299 if (atomic_read(&rdev->nr_pending)) { multipath_remove_disk()
302 p->rdev = rdev; multipath_remove_disk()
356 conf->multipaths[mp_bh->path].rdev->data_offset; multipathd()
357 bio->bi_bdev = conf->multipaths[mp_bh->path].rdev->bdev; multipathd()
380 struct md_rdev *rdev; multipath_run() local
416 rdev_for_each(rdev, mddev) { rdev_for_each()
417 disk_idx = rdev->raid_disk; rdev_for_each()
423 disk->rdev = rdev; rdev_for_each()
424 disk_stack_limits(mddev->gendisk, rdev->bdev, rdev_for_each()
425 rdev->data_offset << 9); rdev_for_each()
427 if (!test_bit(Faulty, &rdev->flags)) rdev_for_each()
H A Dmd-cluster.h22 int (*add_new_disk)(struct mddev *mddev, struct md_rdev *rdev);
25 int (*remove_disk)(struct mddev *mddev, struct md_rdev *rdev);
26 int (*gather_bitmaps)(struct md_rdev *rdev);
H A Draid1.c209 rdev_dec_pending(conf->mirrors[i].rdev, r1_bio->mddev); put_buf()
342 test_bit(In_sync, &conf->mirrors[mirror].rdev->flags))) raid1_end_read_request()
349 rdev_dec_pending(conf->mirrors[mirror].rdev, conf->mddev); raid1_end_read_request()
359 bdevname(conf->mirrors[mirror].rdev->bdev, raid1_end_read_request()
417 &conf->mirrors[mirror].rdev->flags); raid1_end_write_request()
419 &conf->mirrors[mirror].rdev->flags)) raid1_end_write_request()
445 * before rdev->recovery_offset, but for simplicity we don't raid1_end_write_request()
448 if (test_bit(In_sync, &conf->mirrors[mirror].rdev->flags) && raid1_end_write_request()
449 !test_bit(Faulty, &conf->mirrors[mirror].rdev->flags)) raid1_end_write_request()
453 if (is_badblock(conf->mirrors[mirror].rdev, raid1_end_write_request()
462 if (test_bit(WriteMostly, &conf->mirrors[mirror].rdev->flags)) raid1_end_write_request()
486 rdev_dec_pending(conf->mirrors[mirror].rdev, raid1_end_write_request()
511 * The rdev for the device selected will have nr_pending incremented.
523 struct md_rdev *rdev; read_balance() local
559 rdev = rcu_dereference(conf->mirrors[disk].rdev); read_balance()
561 || rdev == NULL read_balance()
562 || test_bit(Faulty, &rdev->flags)) read_balance()
564 if (!test_bit(In_sync, &rdev->flags) && read_balance()
565 rdev->recovery_offset < this_sector + sectors) read_balance()
567 if (test_bit(WriteMostly, &rdev->flags)) { read_balance()
571 if (is_badblock(rdev, this_sector, sectors, read_balance()
587 if (is_badblock(rdev, this_sector, sectors, read_balance()
616 nonrot = blk_queue_nonrot(bdev_get_queue(rdev->bdev)); read_balance()
618 pending = atomic_read(&rdev->nr_pending); read_balance()
627 int opt_iosize = bdev_io_opt(rdev->bdev) >> 9; read_balance()
688 rdev = rcu_dereference(conf->mirrors[best_disk].rdev); read_balance()
689 if (!rdev) read_balance()
691 atomic_inc(&rdev->nr_pending); read_balance()
692 if (test_bit(Faulty, &rdev->flags)) { read_balance()
696 rdev_dec_pending(rdev, conf->mddev); read_balance()
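
read_balance() above walks the mirrors under RCU, skipping any that are absent, faulty, not recovered up to the requested range, or write-mostly, and among the survivors favors a non-rotational device with the shortest queue (nonrot, nr_pending and opt_iosize all appear in the fragment). A toy version of that selection; the scoring is deliberately simplified and the struct fields are illustrative, not the kernel's:

#include <limits.h>
#include <stdio.h>

struct mirror {
	int usable;    /* present, in-sync, not faulty, covers the range */
	int nonrot;    /* SSD: head position is irrelevant */
	int pending;   /* queued requests, like rdev->nr_pending */
};

static int pick_mirror(const struct mirror *m, int n)
{
	int best = -1, best_pending = INT_MAX;

	for (int i = 0; i < n; i++) {
		if (!m[i].usable)
			continue;
		/* raid1 additionally weighs head distance and the
		 * optimal I/O size boundary on rotational disks */
		if (m[i].pending < best_pending ||
		    (best >= 0 && m[i].pending == best_pending &&
		     m[i].nonrot && !m[best].nonrot)) {
			best = i;
			best_pending = m[i].pending;
		}
	}
	return best;
}

int main(void)
{
	struct mirror m[] = { {1, 0, 4}, {1, 1, 1}, {0, 1, 0} };
	printf("reading from mirror %d\n", pick_mirror(m, 3));   /* -> 1 */
	return 0;
}
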
723 struct md_rdev *rdev = rcu_dereference(conf->mirrors[i].rdev); raid1_congested() local
724 if (rdev && !test_bit(Faulty, &rdev->flags)) { raid1_congested()
725 struct request_queue *q = bdev_get_queue(rdev->bdev); raid1_congested()
1147 if (test_bit(WriteMostly, &mirror->rdev->flags) && make_request()
1166 mirror->rdev->data_offset; make_request()
1167 read_bio->bi_bdev = mirror->rdev->bdev; make_request()
1216 * inc refcount on their rdev. Record them by setting make_request()
1233 struct md_rdev *rdev = rcu_dereference(conf->mirrors[i].rdev); make_request() local
1234 if (rdev && unlikely(test_bit(Blocked, &rdev->flags))) { make_request()
1235 atomic_inc(&rdev->nr_pending); make_request()
1236 blocked_rdev = rdev; make_request()
1240 if (!rdev || test_bit(Faulty, &rdev->flags)) { make_request()
1246 atomic_inc(&rdev->nr_pending); make_request()
1247 if (test_bit(WriteErrorSeen, &rdev->flags)) { make_request()
1252 is_bad = is_badblock(rdev, r1_bio->sector, make_request()
1258 set_bit(BlockedBadBlocks, &rdev->flags); make_request()
1259 blocked_rdev = rdev; make_request()
1270 rdev_dec_pending(rdev, mddev); make_request()
1300 rdev_dec_pending(conf->mirrors[j].rdev, mddev); make_request()
1369 if (test_bit(WriteMostly, &conf->mirrors[i].rdev->flags)) make_request()
1376 conf->mirrors[i].rdev->data_offset); make_request()
1377 mbio->bi_bdev = conf->mirrors[i].rdev->bdev; make_request()
1434 struct md_rdev *rdev = rcu_dereference(conf->mirrors[i].rdev); status() local
1436 rdev && test_bit(In_sync, &rdev->flags) ? "U" : "_"); status()
1442 static void error(struct mddev *mddev, struct md_rdev *rdev) error() argument
1454 if (test_bit(In_sync, &rdev->flags) error()
1465 set_bit(Blocked, &rdev->flags); error()
1467 if (test_and_clear_bit(In_sync, &rdev->flags)) { error()
1469 set_bit(Faulty, &rdev->flags); error()
1471 set_bit(Faulty, &rdev->flags); error()
1482 mdname(mddev), bdevname(rdev->bdev, b), error()
1501 struct md_rdev *rdev = rcu_dereference(conf->mirrors[i].rdev); print_conf() local
1502 if (rdev) print_conf()
1504 i, !test_bit(In_sync, &rdev->flags), print_conf()
1505 !test_bit(Faulty, &rdev->flags), print_conf()
1506 bdevname(rdev->bdev,b)); print_conf()
1544 struct md_rdev *rdev = conf->mirrors[i].rdev; raid1_spare_active() local
1545 struct md_rdev *repl = conf->mirrors[conf->raid_disks + i].rdev; raid1_spare_active()
1552 if (!rdev || raid1_spare_active()
1553 !test_and_clear_bit(In_sync, &rdev->flags)) raid1_spare_active()
1555 if (rdev) { raid1_spare_active()
1560 set_bit(Faulty, &rdev->flags); raid1_spare_active()
1562 rdev->sysfs_state); raid1_spare_active()
1565 if (rdev raid1_spare_active()
1566 && rdev->recovery_offset == MaxSector raid1_spare_active()
1567 && !test_bit(Faulty, &rdev->flags) raid1_spare_active()
1568 && !test_and_set_bit(In_sync, &rdev->flags)) { raid1_spare_active()
1570 sysfs_notify_dirent_safe(rdev->sysfs_state); raid1_spare_active()
1580 static int raid1_add_disk(struct mddev *mddev, struct md_rdev *rdev) raid1_add_disk() argument
1592 if (md_integrity_add_rdev(rdev, mddev)) raid1_add_disk()
1595 if (rdev->raid_disk >= 0) raid1_add_disk()
1596 first = last = rdev->raid_disk; raid1_add_disk()
1599 * find the disk ... but prefer rdev->saved_raid_disk raid1_add_disk()
1602 if (rdev->saved_raid_disk >= 0 && raid1_add_disk()
1603 rdev->saved_raid_disk >= first && raid1_add_disk()
1604 conf->mirrors[rdev->saved_raid_disk].rdev == NULL) raid1_add_disk()
1605 first = last = rdev->saved_raid_disk; raid1_add_disk()
1609 if (!p->rdev) { raid1_add_disk()
1612 disk_stack_limits(mddev->gendisk, rdev->bdev, raid1_add_disk()
1613 rdev->data_offset << 9); raid1_add_disk()
1616 rdev->raid_disk = mirror; raid1_add_disk()
1621 if (rdev->saved_raid_disk < 0) raid1_add_disk()
1623 rcu_assign_pointer(p->rdev, rdev); raid1_add_disk()
1626 if (test_bit(WantReplacement, &p->rdev->flags) && raid1_add_disk()
1627 p[conf->raid_disks].rdev == NULL) { raid1_add_disk()
1629 clear_bit(In_sync, &rdev->flags); raid1_add_disk()
1630 set_bit(Replacement, &rdev->flags); raid1_add_disk()
1631 rdev->raid_disk = mirror; raid1_add_disk()
1634 rcu_assign_pointer(p[conf->raid_disks].rdev, rdev); raid1_add_disk()
1638 if (mddev->queue && blk_queue_discard(bdev_get_queue(rdev->bdev))) raid1_add_disk()
1644 static int raid1_remove_disk(struct mddev *mddev, struct md_rdev *rdev) raid1_remove_disk() argument
1648 int number = rdev->raid_disk; raid1_remove_disk()
1651 if (rdev != p->rdev) raid1_remove_disk()
1655 if (rdev == p->rdev) { raid1_remove_disk()
1656 if (test_bit(In_sync, &rdev->flags) || raid1_remove_disk()
1657 atomic_read(&rdev->nr_pending)) { raid1_remove_disk()
1664 if (!test_bit(Faulty, &rdev->flags) && raid1_remove_disk()
1670 p->rdev = NULL; raid1_remove_disk()
1672 if (atomic_read(&rdev->nr_pending)) { raid1_remove_disk()
1675 p->rdev = rdev; raid1_remove_disk()
1677 } else if (conf->mirrors[conf->raid_disks + number].rdev) { raid1_remove_disk()
1683 conf->mirrors[conf->raid_disks + number].rdev; raid1_remove_disk()
1686 p->rdev = repl; raid1_remove_disk()
1687 conf->mirrors[conf->raid_disks + number].rdev = NULL; raid1_remove_disk()
1689 clear_bit(WantReplacement, &rdev->flags); raid1_remove_disk()
1691 clear_bit(WantReplacement, &rdev->flags); raid1_remove_disk()
1742 &conf->mirrors[mirror].rdev->flags); end_sync_write()
1744 &conf->mirrors[mirror].rdev->flags)) end_sync_write()
1748 } else if (is_badblock(conf->mirrors[mirror].rdev, end_sync_write()
1752 !is_badblock(conf->mirrors[r1_bio->read_disk].rdev, end_sync_write()
1771 static int r1_sync_page_io(struct md_rdev *rdev, sector_t sector, r1_sync_page_io() argument
1774 if (sync_page_io(rdev, sector, sectors << 9, page, rw, false)) r1_sync_page_io()
1778 set_bit(WriteErrorSeen, &rdev->flags); r1_sync_page_io()
1780 &rdev->flags)) r1_sync_page_io()
1782 rdev->mddev->recovery); r1_sync_page_io()
1785 if (!rdev_set_badblocks(rdev, sector, sectors, 0)) r1_sync_page_io()
1786 md_error(rdev->mddev, rdev); r1_sync_page_io()
1814 struct md_rdev *rdev; fix_sync_read_error() local
1825 rdev = conf->mirrors[d].rdev; fix_sync_read_error()
1826 if (sync_page_io(rdev, sect, s<<9, fix_sync_read_error()
1852 rdev = conf->mirrors[d].rdev; fix_sync_read_error()
1853 if (!rdev || test_bit(Faulty, &rdev->flags)) fix_sync_read_error()
1855 if (!rdev_set_badblocks(rdev, sect, s, 0)) fix_sync_read_error()
1881 rdev = conf->mirrors[d].rdev; fix_sync_read_error()
1882 if (r1_sync_page_io(rdev, sect, s, fix_sync_read_error()
1886 rdev_dec_pending(rdev, mddev); fix_sync_read_error()
1896 rdev = conf->mirrors[d].rdev; fix_sync_read_error()
1897 if (r1_sync_page_io(rdev, sect, s, fix_sync_read_error()
1900 atomic_add(s, &rdev->corrected_errors); fix_sync_read_error()
1942 conf->mirrors[i].rdev->data_offset; process_checks()
1943 b->bi_bdev = conf->mirrors[i].rdev->bdev; process_checks()
1963 rdev_dec_pending(conf->mirrors[primary].rdev, mddev); process_checks()
1996 rdev_dec_pending(conf->mirrors[i].rdev, mddev); process_checks()
2036 md_sync_acct(conf->mirrors[i].rdev->bdev, bio_sectors(wbio)); sync_request_write()
2071 struct md_rdev *rdev; fix_read_error() local
2085 rdev = conf->mirrors[d].rdev; fix_read_error()
2086 if (rdev && fix_read_error()
2087 (test_bit(In_sync, &rdev->flags) || fix_read_error()
2088 (!test_bit(Faulty, &rdev->flags) && fix_read_error()
2089 rdev->recovery_offset >= sect + s)) && fix_read_error()
2090 is_badblock(rdev, sect, s, fix_read_error()
2092 sync_page_io(rdev, sect, s<<9, fix_read_error()
2104 struct md_rdev *rdev = conf->mirrors[read_disk].rdev; fix_read_error() local
2105 if (!rdev_set_badblocks(rdev, sect, s, 0)) fix_read_error()
2106 md_error(mddev, rdev); fix_read_error()
2115 rdev = conf->mirrors[d].rdev; fix_read_error()
2116 if (rdev && fix_read_error()
2117 !test_bit(Faulty, &rdev->flags)) fix_read_error()
2118 r1_sync_page_io(rdev, sect, s, fix_read_error()
2127 rdev = conf->mirrors[d].rdev; fix_read_error()
2128 if (rdev && fix_read_error()
2129 !test_bit(Faulty, &rdev->flags)) { fix_read_error()
2130 if (r1_sync_page_io(rdev, sect, s, fix_read_error()
2132 atomic_add(s, &rdev->corrected_errors); fix_read_error()
2138 rdev->data_offset), fix_read_error()
2139 bdevname(rdev->bdev, b)); fix_read_error()
2152 struct md_rdev *rdev = conf->mirrors[i].rdev; narrow_write_error() local
2162 * We currently own a reference on the rdev. narrow_write_error()
2171 if (rdev->badblocks.shift < 0) narrow_write_error()
2174 block_sectors = roundup(1 << rdev->badblocks.shift, narrow_write_error()
2175 bdev_logical_block_size(rdev->bdev) >> 9); narrow_write_error()
2209 wbio->bi_iter.bi_sector += rdev->data_offset; narrow_write_error()
2210 wbio->bi_bdev = rdev->bdev; narrow_write_error()
2213 ok = rdev_set_badblocks(rdev, sector, narrow_write_error()
2230 struct md_rdev *rdev = conf->mirrors[m].rdev; handle_sync_write_finished() local
2236 rdev_clear_badblocks(rdev, r1_bio->sector, s, 0); handle_sync_write_finished()
2240 if (!rdev_set_badblocks(rdev, r1_bio->sector, s, 0)) handle_sync_write_finished()
2241 md_error(conf->mddev, rdev); handle_sync_write_finished()
2254 struct md_rdev *rdev = conf->mirrors[m].rdev; handle_write_finished() local
2255 rdev_clear_badblocks(rdev, handle_write_finished()
2258 rdev_dec_pending(rdev, conf->mddev); handle_write_finished()
2267 conf->mirrors[m].rdev); handle_write_finished()
2271 rdev_dec_pending(conf->mirrors[m].rdev, handle_write_finished()
2294 struct md_rdev *rdev; handle_read_error() local
2311 md_error(mddev, conf->mirrors[r1_bio->read_disk].rdev); handle_read_error()
2312 rdev_dec_pending(conf->mirrors[r1_bio->read_disk].rdev, conf->mddev); handle_read_error()
2336 rdev = conf->mirrors[disk].rdev; handle_read_error()
2342 bdevname(rdev->bdev, b)); handle_read_error()
2343 bio->bi_iter.bi_sector = r1_bio->sector + rdev->data_offset; handle_read_error()
2344 bio->bi_bdev = rdev->bdev; handle_read_error()
2563 struct md_rdev *rdev; sync_request() local
2567 rdev = rcu_dereference(conf->mirrors[i].rdev); sync_request()
2568 if (rdev == NULL || sync_request()
2569 test_bit(Faulty, &rdev->flags)) { sync_request()
2572 } else if (!test_bit(In_sync, &rdev->flags)) { sync_request()
2581 if (is_badblock(rdev, sector_nr, good_sectors, sync_request()
2593 if (test_bit(WriteMostly, &rdev->flags)) { sync_request()
2603 } else if (!test_bit(WriteErrorSeen, &rdev->flags) && sync_request()
2618 atomic_inc(&rdev->nr_pending); sync_request()
2619 bio->bi_iter.bi_sector = sector_nr + rdev->data_offset; sync_request()
2620 bio->bi_bdev = rdev->bdev; sync_request()
2636 struct md_rdev *rdev = conf->mirrors[i].rdev; sync_request() local
2637 ok = rdev_set_badblocks(rdev, sector_nr, sync_request()
2779 struct md_rdev *rdev; setup_conf() local
2810 rdev_for_each(rdev, mddev) { rdev_for_each()
2812 int disk_idx = rdev->raid_disk; rdev_for_each()
2816 if (test_bit(Replacement, &rdev->flags)) rdev_for_each()
2821 if (disk->rdev) rdev_for_each()
2823 disk->rdev = rdev; rdev_for_each()
2824 q = bdev_get_queue(rdev->bdev); rdev_for_each()
2850 disk[conf->raid_disks].rdev) {
2852 if (!disk->rdev) {
2856 disk->rdev =
2857 disk[conf->raid_disks].rdev;
2858 disk[conf->raid_disks].rdev = NULL;
2859 } else if (!test_bit(In_sync, &disk->rdev->flags))
2864 if (!disk->rdev ||
2865 !test_bit(In_sync, &disk->rdev->flags)) {
2867 if (disk->rdev &&
2868 (disk->rdev->saved_raid_disk < 0))
2900 struct md_rdev *rdev; run() local
2930 rdev_for_each(rdev, mddev) { rdev_for_each()
2933 disk_stack_limits(mddev->gendisk, rdev->bdev, rdev_for_each()
2934 rdev->data_offset << 9); rdev_for_each()
2935 if (blk_queue_discard(bdev_get_queue(rdev->bdev))) rdev_for_each()
2941 if (conf->mirrors[i].rdev == NULL ||
2942 !test_bit(In_sync, &conf->mirrors[i].rdev->flags) ||
2943 test_bit(Faulty, &conf->mirrors[i].rdev->flags))
3068 if (conf->mirrors[d].rdev) raid1_reshape()
3101 struct md_rdev *rdev = conf->mirrors[d].rdev; raid1_reshape() local
3102 if (rdev && rdev->raid_disk != d2) { raid1_reshape()
3103 sysfs_unlink_rdev(mddev, rdev); raid1_reshape()
3104 rdev->raid_disk = d2; raid1_reshape()
3105 sysfs_unlink_rdev(mddev, rdev); raid1_reshape()
3106 if (sysfs_link_rdev(mddev, rdev)) raid1_reshape()
3109 mdname(mddev), rdev->raid_disk); raid1_reshape()
3111 if (rdev) raid1_reshape()
3112 newmirrors[d2++].rdev = rdev; raid1_reshape()
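A note on the pattern these raid1.c hits keep repeating: mirrors[].rdev is an RCU-protected pointer, and any path that wants to issue I/O pins the device by bumping nr_pending before leaving the RCU read section, then re-checks Faulty in case it raced with error handling (see the read_balance() fragments around source lines 688-696). A minimal sketch of that protocol, with a hypothetical helper name and simplified error handling; not the kernel's exact code:

struct md_rdev *pin_mirror_rdev(struct r1conf *conf, int disk)
{
	struct md_rdev *rdev;

	rcu_read_lock();
	rdev = rcu_dereference(conf->mirrors[disk].rdev);
	if (!rdev || test_bit(Faulty, &rdev->flags)) {
		rcu_read_unlock();
		return NULL;
	}
	atomic_inc(&rdev->nr_pending);	/* pin across the I/O */
	rcu_read_unlock();

	if (test_bit(Faulty, &rdev->flags)) {
		/* raced with error handling: drop the pin again */
		rdev_dec_pending(rdev, conf->mddev);
		return NULL;
	}
	return rdev;
}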
H A Draid10.c367 struct md_rdev *rdev; raid10_end_read_request() local
372 rdev = r10_bio->devs[slot].rdev; raid10_end_read_request()
396 rdev->raid_disk)) raid10_end_read_request()
401 rdev_dec_pending(rdev, conf->mddev); raid10_end_read_request()
404 * oops, read error - keep the refcount on the rdev raid10_end_read_request()
410 bdevname(rdev->bdev, b), raid10_end_read_request()
449 struct md_rdev *rdev = NULL; raid10_end_write_request() local
454 rdev = conf->mirrors[dev].replacement; raid10_end_write_request()
455 if (!rdev) { raid10_end_write_request()
458 rdev = conf->mirrors[dev].rdev; raid10_end_write_request()
468 md_error(rdev->mddev, rdev); raid10_end_write_request()
470 set_bit(WriteErrorSeen, &rdev->flags); raid10_end_write_request()
471 if (!test_and_set_bit(WantReplacement, &rdev->flags)) raid10_end_write_request()
473 &rdev->mddev->recovery); raid10_end_write_request()
495 * before rdev->recovery_offset, but for simplicity we don't raid10_end_write_request()
498 if (test_bit(In_sync, &rdev->flags) && raid10_end_write_request()
499 !test_bit(Faulty, &rdev->flags)) raid10_end_write_request()
503 if (is_badblock(rdev, raid10_end_write_request()
524 rdev_dec_pending(rdev, conf->mddev); raid10_end_write_request()
687 * The rdev for the device selected will have nr_pending incremented.
703 struct md_rdev *best_rdev, *rdev = NULL; read_balance() local
735 rdev = rcu_dereference(conf->mirrors[disk].replacement); read_balance()
736 if (rdev == NULL || test_bit(Faulty, &rdev->flags) || read_balance()
737 r10_bio->devs[slot].addr + sectors > rdev->recovery_offset) read_balance()
738 rdev = rcu_dereference(conf->mirrors[disk].rdev); read_balance()
739 if (rdev == NULL || read_balance()
740 test_bit(Faulty, &rdev->flags)) read_balance()
742 if (!test_bit(In_sync, &rdev->flags) && read_balance()
743 r10_bio->devs[slot].addr + sectors > rdev->recovery_offset) read_balance()
747 if (is_badblock(rdev, dev_sector, sectors, read_balance()
768 best_rdev = rdev; read_balance()
785 if (geo->near_copies > 1 && !atomic_read(&rdev->nr_pending)) read_balance()
797 best_rdev = rdev; read_balance()
802 rdev = best_rdev; read_balance()
806 atomic_inc(&rdev->nr_pending); read_balance()
807 if (test_bit(Faulty, &rdev->flags)) { read_balance()
811 rdev_dec_pending(rdev, conf->mddev); read_balance()
816 rdev = NULL; read_balance()
820 return rdev; read_balance()
837 struct md_rdev *rdev = rcu_dereference(conf->mirrors[i].rdev); raid10_congested() local
838 if (rdev && !test_bit(Faulty, &rdev->flags)) { raid10_congested()
839 struct request_queue *q = bdev_get_queue(rdev->bdev); raid10_congested()
1002 struct md_rdev *rdev) choose_data_offset()
1004 if (!test_bit(MD_RECOVERY_RESHAPE, &rdev->mddev->recovery) || choose_data_offset()
1006 return rdev->data_offset; choose_data_offset()
1008 return rdev->new_data_offset; choose_data_offset()
1137 struct md_rdev *rdev; __make_request() local
1141 rdev = read_balance(conf, r10_bio, &max_sectors); __make_request()
1142 if (!rdev) { __make_request()
1153 r10_bio->devs[slot].rdev = rdev; __make_request()
1156 choose_data_offset(r10_bio, rdev); __make_request()
1157 read_bio->bi_bdev = rdev->bdev; __make_request()
1205 * inc refcount on their rdev. Record them by setting __make_request()
1225 struct md_rdev *rdev = rcu_dereference(conf->mirrors[d].rdev); __make_request() local
1228 if (rdev == rrdev) __make_request()
1230 if (rdev && unlikely(test_bit(Blocked, &rdev->flags))) { __make_request()
1231 atomic_inc(&rdev->nr_pending); __make_request()
1232 blocked_rdev = rdev; __make_request()
1240 if (rdev && (test_bit(Faulty, &rdev->flags))) __make_request()
1241 rdev = NULL; __make_request()
1248 if (!rdev && !rrdev) { __make_request()
1252 if (rdev && test_bit(WriteErrorSeen, &rdev->flags)) { __make_request()
1258 is_bad = is_badblock(rdev, dev_sector, __make_request()
1265 atomic_inc(&rdev->nr_pending); __make_request()
1266 set_bit(BlockedBadBlocks, &rdev->flags); __make_request()
1267 blocked_rdev = rdev; __make_request()
1294 if (rdev) { __make_request()
1296 atomic_inc(&rdev->nr_pending); __make_request()
1313 rdev_dec_pending(conf->mirrors[d].rdev, mddev); __make_request()
1316 struct md_rdev *rdev; __make_request() local
1318 rdev = conf->mirrors[d].replacement; __make_request()
1319 if (!rdev) { __make_request()
1322 rdev = conf->mirrors[d].rdev; __make_request()
1324 rdev_dec_pending(rdev, mddev); __make_request()
1355 struct md_rdev *rdev = conf->mirrors[d].rdev; __make_request() local
1363 rdev)); __make_request()
1364 mbio->bi_bdev = rdev->bdev; __make_request()
1393 struct md_rdev *rdev = conf->mirrors[d].replacement; __make_request() local
1394 if (rdev == NULL) { __make_request()
1395 /* Replacement just got moved to main 'rdev' */ __make_request()
1397 rdev = conf->mirrors[d].rdev; __make_request()
1406 r10_bio, rdev)); __make_request()
1407 mbio->bi_bdev = rdev->bdev; __make_request()
1508 conf->mirrors[i].rdev && status()
1509 test_bit(In_sync, &conf->mirrors[i].rdev->flags) ? "U" : "_"); status()
1537 struct md_rdev *rdev; _enough() local
1539 (rdev = rcu_dereference(conf->mirrors[this].rdev)) && _enough()
1540 test_bit(In_sync, &rdev->flags)) _enough()
1565 static void error(struct mddev *mddev, struct md_rdev *rdev) error() argument
1578 if (test_bit(In_sync, &rdev->flags) error()
1579 && !enough(conf, rdev->raid_disk)) { error()
1586 if (test_and_clear_bit(In_sync, &rdev->flags)) error()
1592 set_bit(Blocked, &rdev->flags); error()
1593 set_bit(Faulty, &rdev->flags); error()
1600 mdname(mddev), bdevname(rdev->bdev, b), error()
1620 if (tmp->rdev) print_conf()
1622 i, !test_bit(In_sync, &tmp->rdev->flags), print_conf()
1623 !test_bit(Faulty, &tmp->rdev->flags), print_conf()
1624 bdevname(tmp->rdev->bdev,b)); print_conf()
1656 if (!tmp->rdev raid10_spare_active()
1657 || !test_and_clear_bit(In_sync, &tmp->rdev->flags)) raid10_spare_active()
1659 if (tmp->rdev) { raid10_spare_active()
1664 set_bit(Faulty, &tmp->rdev->flags); raid10_spare_active()
1666 tmp->rdev->sysfs_state); raid10_spare_active()
1669 } else if (tmp->rdev raid10_spare_active()
1670 && tmp->rdev->recovery_offset == MaxSector raid10_spare_active()
1671 && !test_bit(Faulty, &tmp->rdev->flags) raid10_spare_active()
1672 && !test_and_set_bit(In_sync, &tmp->rdev->flags)) { raid10_spare_active()
1674 sysfs_notify_dirent_safe(tmp->rdev->sysfs_state); raid10_spare_active()
1685 static int raid10_add_disk(struct mddev *mddev, struct md_rdev *rdev) raid10_add_disk() argument
1698 if (rdev->saved_raid_disk < 0 && !_enough(conf, 1, -1)) raid10_add_disk()
1701 if (md_integrity_add_rdev(rdev, mddev)) raid10_add_disk()
1704 if (rdev->raid_disk >= 0) raid10_add_disk()
1705 first = last = rdev->raid_disk; raid10_add_disk()
1707 if (rdev->saved_raid_disk >= first && raid10_add_disk()
1708 conf->mirrors[rdev->saved_raid_disk].rdev == NULL) raid10_add_disk()
1709 mirror = rdev->saved_raid_disk; raid10_add_disk()
1716 if (p->rdev) { raid10_add_disk()
1717 if (!test_bit(WantReplacement, &p->rdev->flags) || raid10_add_disk()
1720 clear_bit(In_sync, &rdev->flags); raid10_add_disk()
1721 set_bit(Replacement, &rdev->flags); raid10_add_disk()
1722 rdev->raid_disk = mirror; raid10_add_disk()
1725 disk_stack_limits(mddev->gendisk, rdev->bdev, raid10_add_disk()
1726 rdev->data_offset << 9); raid10_add_disk()
1728 rcu_assign_pointer(p->replacement, rdev); raid10_add_disk()
1733 disk_stack_limits(mddev->gendisk, rdev->bdev, raid10_add_disk()
1734 rdev->data_offset << 9); raid10_add_disk()
1738 rdev->raid_disk = mirror; raid10_add_disk()
1740 if (rdev->saved_raid_disk != mirror) raid10_add_disk()
1742 rcu_assign_pointer(p->rdev, rdev); raid10_add_disk()
1745 if (mddev->queue && blk_queue_discard(bdev_get_queue(rdev->bdev))) raid10_add_disk()
1752 static int raid10_remove_disk(struct mddev *mddev, struct md_rdev *rdev) raid10_remove_disk() argument
1756 int number = rdev->raid_disk; raid10_remove_disk()
1761 if (rdev == p->rdev) raid10_remove_disk()
1762 rdevp = &p->rdev; raid10_remove_disk()
1763 else if (rdev == p->replacement) raid10_remove_disk()
1768 if (test_bit(In_sync, &rdev->flags) || raid10_remove_disk()
1769 atomic_read(&rdev->nr_pending)) { raid10_remove_disk()
1776 if (!test_bit(Faulty, &rdev->flags) && raid10_remove_disk()
1778 (!p->replacement || p->replacement == rdev) && raid10_remove_disk()
1786 if (atomic_read(&rdev->nr_pending)) { raid10_remove_disk()
1789 *rdevp = rdev; raid10_remove_disk()
1792 /* We must have just cleared 'rdev' */ raid10_remove_disk()
1793 p->rdev = p->replacement; raid10_remove_disk()
1799 clear_bit(WantReplacement, &rdev->flags); raid10_remove_disk()
1804 clear_bit(WantReplacement, &rdev->flags); raid10_remove_disk()
1833 &conf->mirrors[d].rdev->corrected_errors); end_sync_read()
1838 rdev_dec_pending(conf->mirrors[d].rdev, conf->mddev); end_sync_read()
1885 struct md_rdev *rdev = NULL; end_sync_write() local
1889 rdev = conf->mirrors[d].replacement; end_sync_write()
1891 rdev = conf->mirrors[d].rdev; end_sync_write()
1895 md_error(mddev, rdev); end_sync_write()
1897 set_bit(WriteErrorSeen, &rdev->flags); end_sync_write()
1898 if (!test_and_set_bit(WantReplacement, &rdev->flags)) end_sync_write()
1900 &rdev->mddev->recovery); end_sync_write()
1903 } else if (is_badblock(rdev, end_sync_write()
1909 rdev_dec_pending(rdev, mddev); end_sync_write()
2003 atomic_inc(&conf->mirrors[d].rdev->nr_pending); sync_request_write()
2005 md_sync_acct(conf->mirrors[d].rdev->bdev, bio_sectors(tbio)); sync_request_write()
2007 tbio->bi_iter.bi_sector += conf->mirrors[d].rdev->data_offset; sync_request_write()
2008 tbio->bi_bdev = conf->mirrors[d].rdev->bdev; sync_request_write()
2068 struct md_rdev *rdev; fix_recovery_read_error() local
2075 rdev = conf->mirrors[dr].rdev; fix_recovery_read_error()
2077 ok = sync_page_io(rdev, fix_recovery_read_error()
2083 rdev = conf->mirrors[dw].rdev; fix_recovery_read_error()
2085 ok = sync_page_io(rdev, fix_recovery_read_error()
2091 set_bit(WriteErrorSeen, &rdev->flags); fix_recovery_read_error()
2093 &rdev->flags)) fix_recovery_read_error()
2095 &rdev->mddev->recovery); fix_recovery_read_error()
2103 rdev_set_badblocks(rdev, addr, s, 0); fix_recovery_read_error()
2105 if (rdev != conf->mirrors[dw].rdev) { fix_recovery_read_error()
2107 struct md_rdev *rdev2 = conf->mirrors[dw].rdev; fix_recovery_read_error()
2158 atomic_inc(&conf->mirrors[d].rdev->nr_pending); recovery_request_write()
2159 md_sync_acct(conf->mirrors[d].rdev->bdev, bio_sectors(wbio)); recovery_request_write()
2171 * Used by fix_read_error() to decay the per rdev read_errors.
2176 static void check_decay_read_errors(struct mddev *mddev, struct md_rdev *rdev) check_decay_read_errors() argument
2180 unsigned int read_errors = atomic_read(&rdev->read_errors); check_decay_read_errors()
2184 if (rdev->last_read_error.tv_sec == 0 && check_decay_read_errors()
2185 rdev->last_read_error.tv_nsec == 0) { check_decay_read_errors()
2187 rdev->last_read_error = cur_time_mon; check_decay_read_errors()
2192 rdev->last_read_error.tv_sec) / 3600; check_decay_read_errors()
2194 rdev->last_read_error = cur_time_mon; check_decay_read_errors()
2202 atomic_set(&rdev->read_errors, 0); check_decay_read_errors()
2204 atomic_set(&rdev->read_errors, read_errors >> hours_since_last); check_decay_read_errors()
2207 static int r10_sync_page_io(struct md_rdev *rdev, sector_t sector, r10_sync_page_io() argument
2213 if (is_badblock(rdev, sector, sectors, &first_bad, &bad_sectors) r10_sync_page_io()
2214 && (rw == READ || test_bit(WriteErrorSeen, &rdev->flags))) r10_sync_page_io()
2216 if (sync_page_io(rdev, sector, sectors << 9, page, rw, false)) r10_sync_page_io()
2220 set_bit(WriteErrorSeen, &rdev->flags); r10_sync_page_io()
2221 if (!test_and_set_bit(WantReplacement, &rdev->flags)) r10_sync_page_io()
2223 &rdev->mddev->recovery); r10_sync_page_io()
2226 if (!rdev_set_badblocks(rdev, sector, sectors, 0)) r10_sync_page_io()
2227 md_error(rdev->mddev, rdev); r10_sync_page_io()
2243 struct md_rdev *rdev; fix_read_error() local
2247 /* still own a reference to this rdev, so it cannot fix_read_error()
2250 rdev = conf->mirrors[d].rdev; fix_read_error()
2252 if (test_bit(Faulty, &rdev->flags)) fix_read_error()
2257 check_decay_read_errors(mddev, rdev); fix_read_error()
2258 atomic_inc(&rdev->read_errors); fix_read_error()
2259 if (atomic_read(&rdev->read_errors) > max_read_errors) { fix_read_error()
2261 bdevname(rdev->bdev, b); fix_read_error()
2267 atomic_read(&rdev->read_errors), max_read_errors); fix_read_error()
2271 md_error(mddev, conf->mirrors[d].rdev); fix_read_error()
2291 rdev = rcu_dereference(conf->mirrors[d].rdev); fix_read_error()
2292 if (rdev && fix_read_error()
2293 test_bit(In_sync, &rdev->flags) && fix_read_error()
2294 is_badblock(rdev, r10_bio->devs[sl].addr + sect, s, fix_read_error()
2296 atomic_inc(&rdev->nr_pending); fix_read_error()
2298 success = sync_page_io(rdev, fix_read_error()
2303 rdev_dec_pending(rdev, mddev); fix_read_error()
2320 rdev = conf->mirrors[dn].rdev; fix_read_error()
2323 rdev, fix_read_error()
2327 md_error(mddev, rdev); fix_read_error()
2344 rdev = rcu_dereference(conf->mirrors[d].rdev); fix_read_error()
2345 if (!rdev || fix_read_error()
2346 !test_bit(In_sync, &rdev->flags)) fix_read_error()
2349 atomic_inc(&rdev->nr_pending); fix_read_error()
2351 if (r10_sync_page_io(rdev, fix_read_error()
2365 rdev)), fix_read_error()
2366 bdevname(rdev->bdev, b)); fix_read_error()
2370 bdevname(rdev->bdev, b)); fix_read_error()
2372 rdev_dec_pending(rdev, mddev); fix_read_error()
2383 rdev = rcu_dereference(conf->mirrors[d].rdev); fix_read_error()
2384 if (!rdev || fix_read_error()
2385 !test_bit(In_sync, &rdev->flags)) fix_read_error()
2388 atomic_inc(&rdev->nr_pending); fix_read_error()
2390 switch (r10_sync_page_io(rdev, fix_read_error()
2404 choose_data_offset(r10_bio, rdev)), fix_read_error()
2405 bdevname(rdev->bdev, b)); fix_read_error()
2409 bdevname(rdev->bdev, b)); fix_read_error()
2418 choose_data_offset(r10_bio, rdev)), fix_read_error()
2419 bdevname(rdev->bdev, b)); fix_read_error()
2420 atomic_add(s, &rdev->corrected_errors); fix_read_error()
2423 rdev_dec_pending(rdev, mddev); fix_read_error()
2438 struct md_rdev *rdev = conf->mirrors[r10_bio->devs[i].devnum].rdev; narrow_write_error() local
2447 * We currently own a reference to the rdev. narrow_write_error()
2456 if (rdev->badblocks.shift < 0) narrow_write_error()
2459 block_sectors = roundup(1 << rdev->badblocks.shift, narrow_write_error()
2460 bdev_logical_block_size(rdev->bdev) >> 9); narrow_write_error()
2474 choose_data_offset(r10_bio, rdev) + narrow_write_error()
2476 wbio->bi_bdev = rdev->bdev; narrow_write_error()
2479 ok = rdev_set_badblocks(rdev, sector, narrow_write_error()
2496 struct md_rdev *rdev = r10_bio->devs[slot].rdev; handle_read_error() local
2521 rdev_dec_pending(rdev, mddev); handle_read_error()
2524 rdev = read_balance(conf, r10_bio, &max_sectors); handle_read_error()
2525 if (rdev == NULL) { handle_read_error()
2541 bdevname(rdev->bdev, b), handle_read_error()
2547 r10_bio->devs[slot].rdev = rdev; handle_read_error()
2549 + choose_data_offset(r10_bio, rdev); handle_read_error()
2550 bio->bi_bdev = rdev->bdev; handle_read_error()
2594 struct md_rdev *rdev; handle_write_completed() local
2600 rdev = conf->mirrors[dev].rdev; handle_write_completed()
2605 rdev, handle_write_completed()
2610 rdev, handle_write_completed()
2613 md_error(conf->mddev, rdev); handle_write_completed()
2615 rdev = conf->mirrors[dev].replacement; handle_write_completed()
2621 rdev, handle_write_completed()
2626 rdev, handle_write_completed()
2629 md_error(conf->mddev, rdev); handle_write_completed()
2638 rdev = conf->mirrors[dev].rdev; handle_write_completed()
2641 rdev, handle_write_completed()
2644 rdev_dec_pending(rdev, conf->mddev); handle_write_completed()
2648 md_error(conf->mddev, rdev); handle_write_completed()
2652 rdev_dec_pending(rdev, conf->mddev); handle_write_completed()
2655 rdev = conf->mirrors[dev].replacement; handle_write_completed()
2656 if (rdev && bio == IO_MADE_GOOD) { handle_write_completed()
2658 rdev, handle_write_completed()
2661 rdev_dec_pending(rdev, conf->mddev); handle_write_completed()
2944 if ((mirror->rdev == NULL || sync_request()
2945 test_bit(In_sync, &mirror->rdev->flags)) sync_request()
2998 if (conf->mirrors[j].rdev == NULL || sync_request()
2999 test_bit(Faulty, &conf->mirrors[j].rdev->flags)) { sync_request()
3012 struct md_rdev *rdev; sync_request() local
3015 if (!conf->mirrors[d].rdev || sync_request()
3016 !test_bit(In_sync, &conf->mirrors[d].rdev->flags)) sync_request()
3020 rdev = conf->mirrors[d].rdev; sync_request()
3023 if (is_badblock(rdev, sector, max_sync, sync_request()
3044 rdev->data_offset; sync_request()
3045 bio->bi_bdev = rdev->bdev; sync_request()
3046 atomic_inc(&rdev->nr_pending); sync_request()
3059 rdev = mirror->rdev; sync_request()
3060 if (!test_bit(In_sync, &rdev->flags)) { sync_request()
3069 + rdev->data_offset; sync_request()
3070 bio->bi_bdev = rdev->bdev; sync_request()
3079 rdev = mirror->replacement; sync_request()
3080 /* Note: if rdev != NULL, then bio sync_request()
3088 if (rdev == NULL || bio == NULL || sync_request()
3089 test_bit(Faulty, &rdev->flags)) sync_request()
3098 rdev->data_offset; sync_request()
3099 bio->bi_bdev = rdev->bdev; sync_request()
3115 &mirror->rdev->flags) sync_request()
3117 mirror->rdev, sync_request()
3194 if (conf->mirrors[d].rdev == NULL || sync_request()
3195 test_bit(Faulty, &conf->mirrors[d].rdev->flags)) sync_request()
3198 if (is_badblock(conf->mirrors[d].rdev, sync_request()
3210 atomic_inc(&conf->mirrors[d].rdev->nr_pending); sync_request()
3218 conf->mirrors[d].rdev->data_offset; sync_request()
3219 bio->bi_bdev = conf->mirrors[d].rdev->bdev; sync_request()
3233 atomic_inc(&conf->mirrors[d].rdev->nr_pending); sync_request()
3249 rdev_dec_pending(conf->mirrors[d].rdev, sync_request()
3534 struct md_rdev *rdev; run() local
3566 rdev_for_each(rdev, mddev) { rdev_for_each()
3570 disk_idx = rdev->raid_disk; rdev_for_each()
3578 if (test_bit(Replacement, &rdev->flags)) { rdev_for_each()
3581 disk->replacement = rdev; rdev_for_each()
3583 if (disk->rdev) rdev_for_each()
3585 disk->rdev = rdev; rdev_for_each()
3587 q = bdev_get_queue(rdev->bdev); rdev_for_each()
3588 diff = (rdev->new_data_offset - rdev->data_offset); rdev_for_each()
3597 disk_stack_limits(mddev->gendisk, rdev->bdev, rdev_for_each()
3598 rdev->data_offset << 9); rdev_for_each()
3602 if (blk_queue_discard(bdev_get_queue(rdev->bdev))) rdev_for_each()
3639 if (!disk->rdev && disk->replacement) {
3641 disk->rdev = disk->replacement;
3643 clear_bit(Replacement, &disk->rdev->flags);
3646 if (!disk->rdev ||
3647 !test_bit(In_sync, &disk->rdev->flags)) {
3650 if (disk->rdev &&
3651 disk->rdev->saved_raid_disk < 0)
3800 struct md_rdev *rdev; raid10_takeover_raid0() local
3823 rdev_for_each(rdev, mddev) rdev_for_each()
3824 if (rdev->raid_disk >= 0) { rdev_for_each()
3825 rdev->new_raid_disk = rdev->raid_disk * 2; rdev_for_each()
3826 rdev->sectors = size; rdev_for_each()
3930 struct md_rdev *rdev = rcu_dereference(conf->mirrors[i].rdev); calc_degraded() local
3931 if (!rdev || test_bit(Faulty, &rdev->flags)) calc_degraded()
3933 else if (!test_bit(In_sync, &rdev->flags)) calc_degraded()
3946 struct md_rdev *rdev = rcu_dereference(conf->mirrors[i].rdev); calc_degraded() local
3947 if (!rdev || test_bit(Faulty, &rdev->flags)) calc_degraded()
3949 else if (!test_bit(In_sync, &rdev->flags)) { calc_degraded()
3982 struct md_rdev *rdev; raid10_start_reshape() local
3997 rdev_for_each(rdev, mddev) { rdev_for_each()
3998 if (!test_bit(In_sync, &rdev->flags) rdev_for_each()
3999 && !test_bit(Faulty, &rdev->flags)) rdev_for_each()
4001 if (rdev->raid_disk >= 0) { rdev_for_each()
4002 long long diff = (rdev->new_data_offset rdev_for_each()
4003 - rdev->data_offset); rdev_for_each()
4056 rdev_for_each(rdev, mddev) rdev_for_each()
4057 if (rdev->raid_disk < 0 && rdev_for_each()
4058 !test_bit(Faulty, &rdev->flags)) { rdev_for_each()
4059 if (raid10_add_disk(mddev, rdev) == 0) { rdev_for_each()
4060 if (rdev->raid_disk >= rdev_for_each()
4062 set_bit(In_sync, &rdev->flags); rdev_for_each()
4064 rdev->recovery_offset = 0; rdev_for_each()
4066 if (sysfs_link_rdev(mddev, rdev)) rdev_for_each()
4069 } else if (rdev->raid_disk >= conf->prev.raid_disks
4070 && !test_bit(Faulty, &rdev->flags)) {
4072 set_bit(In_sync, &rdev->flags);
4108 rdev_for_each(rdev, mddev)
4109 rdev->new_data_offset = rdev->data_offset;
4195 struct md_rdev *rdev; reshape_request() local
4301 rdev = read_balance(conf, r10_bio, &max_sectors); reshape_request()
4304 if (!rdev) { reshape_request()
4316 read_bio->bi_bdev = rdev->bdev; reshape_request()
4318 + rdev->data_offset); reshape_request()
4343 rdev2 = conf->mirrors[d].rdev; reshape_request()
4438 struct md_rdev *rdev; reshape_request_write() local
4440 rdev = conf->mirrors[d].replacement; reshape_request_write()
4443 rdev = conf->mirrors[d].rdev; reshape_request_write()
4446 if (!rdev || test_bit(Faulty, &rdev->flags)) reshape_request_write()
4448 atomic_inc(&rdev->nr_pending); reshape_request_write()
4511 struct md_rdev *rdev = conf->mirrors[d].rdev; handle_reshape_read_error() local
4513 if (rdev == NULL || handle_reshape_read_error()
4514 test_bit(Faulty, &rdev->flags) || handle_reshape_read_error()
4515 !test_bit(In_sync, &rdev->flags)) handle_reshape_read_error()
4519 success = sync_page_io(rdev, handle_reshape_read_error()
4553 struct md_rdev *rdev = NULL; end_reshape_write() local
4557 rdev = conf->mirrors[d].replacement; end_reshape_write()
4558 if (!rdev) { end_reshape_write()
4560 rdev = conf->mirrors[d].rdev; end_reshape_write()
4565 md_error(mddev, rdev); end_reshape_write()
4568 rdev_dec_pending(rdev, mddev); end_reshape_write()
4603 struct md_rdev *rdev = conf->mirrors[d].rdev; raid10_finish_reshape() local
4604 if (rdev) raid10_finish_reshape()
4605 clear_bit(In_sync, &rdev->flags); raid10_finish_reshape()
4606 rdev = conf->mirrors[d].replacement; raid10_finish_reshape()
4607 if (rdev) raid10_finish_reshape()
4608 clear_bit(In_sync, &rdev->flags); raid10_finish_reshape()
1001 choose_data_offset(struct r10bio *r10_bio, struct md_rdev *rdev) choose_data_offset() argument
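The choose_data_offset() fragments above compress to a two-way choice: outside a reshape, or for I/O aimed at the part of the array laid out the old way, use data_offset; otherwise use new_data_offset. Reconstructed roughly as follows; the exact r10_bio state bit tested is elided by the search output, so it appears here as a hypothetical predicate:

static sector_t choose_offset(struct r10bio *r10_bio, struct md_rdev *rdev)
{
	if (!test_bit(MD_RECOVERY_RESHAPE, &rdev->mddev->recovery) ||
	    r10bio_uses_old_layout(r10_bio))	/* hypothetical predicate */
		return rdev->data_offset;
	return rdev->new_data_offset;
}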
H A Dlinear.c63 struct request_queue *q = bdev_get_queue(conf->disks[i].rdev->bdev); linear_congested()
86 struct md_rdev *rdev; linear_conf() local
98 rdev_for_each(rdev, mddev) { rdev_for_each()
99 int j = rdev->raid_disk; rdev_for_each()
103 if (j < 0 || j >= raid_disks || disk->rdev) { rdev_for_each()
109 disk->rdev = rdev; rdev_for_each()
111 sectors = rdev->sectors; rdev_for_each()
113 rdev->sectors = sectors * mddev->chunk_sectors; rdev_for_each()
116 disk_stack_limits(mddev->gendisk, rdev->bdev, rdev_for_each()
117 rdev->data_offset << 9); rdev_for_each()
119 conf->array_sectors += rdev->sectors; rdev_for_each()
122 if (blk_queue_discard(bdev_get_queue(rdev->bdev))) rdev_for_each()
139 conf->disks[0].end_sector = conf->disks[0].rdev->sectors;
144 conf->disks[i].rdev->sectors;
175 static int linear_add(struct mddev *mddev, struct md_rdev *rdev) linear_add() argument
187 if (rdev->saved_raid_disk != mddev->raid_disks) linear_add()
190 rdev->raid_disk = rdev->saved_raid_disk; linear_add()
191 rdev->saved_raid_disk = -1; linear_add()
231 start_sector = tmp_dev->end_sector - tmp_dev->rdev->sectors; linear_make_request()
233 data_offset = tmp_dev->rdev->data_offset; linear_make_request()
234 bio->bi_bdev = tmp_dev->rdev->bdev; linear_make_request()
270 bdevname(tmp_dev->rdev->bdev, b), linear_make_request()
271 (unsigned long long)tmp_dev->rdev->sectors, linear_make_request()
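The arithmetic behind the linear.c hits: each member device owns the sector range [end_sector - rdev->sectors, end_sector), so remapping a bio is a subtraction plus the device's data_offset, exactly as in linear_make_request() at source lines 231-234. A simplified sketch (device lookup and bio splitting omitted):

static void linear_remap_bio(struct bio *bio, struct dev_info *tmp_dev)
{
	sector_t start_sector =
		tmp_dev->end_sector - tmp_dev->rdev->sectors;

	bio->bi_bdev = tmp_dev->rdev->bdev;
	bio->bi_iter.bi_sector = bio->bi_iter.bi_sector
		- start_sector + tmp_dev->rdev->data_offset;
}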
H A Ddm-raid.c28 #define FirstUse 10 /* rdev flag */
45 struct md_rdev rdev; member in struct:raid_dev
178 md_rdev_init(&rs->dev[i].rdev); context_alloc()
199 md_rdev_clear(&rs->dev[i].rdev); context_free()
231 rs->dev[i].rdev.raid_disk = i; dev_parms()
240 rs->dev[i].rdev.data_offset = 0; dev_parms()
241 rs->dev[i].rdev.mddev = &rs->md; dev_parms()
251 rs->dev[i].rdev.sb_page = alloc_page(GFP_KERNEL); dev_parms()
252 if (!rs->dev[i].rdev.sb_page) dev_parms()
257 if (!test_bit(In_sync, &rs->dev[i].rdev.flags) && dev_parms()
258 (!rs->dev[i].rdev.recovery_offset)) { dev_parms()
280 rs->dev[i].rdev.meta_bdev = rs->dev[i].meta_dev->bdev; dev_parms()
282 rs->dev[i].rdev.bdev = rs->dev[i].data_dev->bdev; dev_parms()
283 list_add(&rs->dev[i].rdev.same_set, &rs->md.disks); dev_parms()
284 if (!test_bit(In_sync, &rs->dev[i].rdev.flags)) dev_parms()
390 if (!test_bit(In_sync, &rs->dev[i].rdev.flags) || validate_raid_redundancy()
391 !rs->dev[i].rdev.sb_page) validate_raid_redundancy()
429 if ((!rs->dev[d].rdev.sb_page || validate_raid_redundancy()
430 !test_bit(In_sync, &rs->dev[d].rdev.flags)) && validate_raid_redundancy()
455 if ((!rs->dev[i].rdev.sb_page || validate_raid_redundancy()
456 !test_bit(In_sync, &rs->dev[i].rdev.flags)) && validate_raid_redundancy()
547 set_bit(In_sync, &rs->dev[i].rdev.flags); parse_raid_params()
548 rs->dev[i].rdev.recovery_offset = MaxSector; parse_raid_params()
602 clear_bit(In_sync, &rs->dev[value].rdev.flags); parse_raid_params()
603 rs->dev[value].rdev.recovery_offset = 0; parse_raid_params()
614 set_bit(WriteMostly, &rs->dev[value].rdev.flags); parse_raid_params()
788 static int read_disk_sb(struct md_rdev *rdev, int size) read_disk_sb() argument
790 BUG_ON(!rdev->sb_page); read_disk_sb()
792 if (rdev->sb_loaded) read_disk_sb()
795 if (!sync_page_io(rdev, 0, size, rdev->sb_page, READ, 1)) { read_disk_sb()
797 rdev->raid_disk); read_disk_sb()
798 md_error(rdev->mddev, rdev); read_disk_sb()
802 rdev->sb_loaded = 1; read_disk_sb()
807 static void super_sync(struct mddev *mddev, struct md_rdev *rdev) super_sync() argument
814 sb = page_address(rdev->sb_page); super_sync()
819 test_bit(Faulty, &(rs->dev[i].rdev.flags))) super_sync()
822 memset(sb + 1, 0, rdev->sb_size - sizeof(*sb)); super_sync()
828 sb->array_position = cpu_to_le32(rdev->raid_disk); super_sync()
833 sb->disk_recovery_offset = cpu_to_le64(rdev->recovery_offset); super_sync()
847 * Return: 1 if use rdev, 0 if use refdev, -Exxx otherwise
849 static int super_load(struct md_rdev *rdev, struct md_rdev *refdev) super_load() argument
856 rdev->sb_start = 0; super_load()
857 rdev->sb_size = bdev_logical_block_size(rdev->meta_bdev); super_load()
858 if (rdev->sb_size < sizeof(*sb) || rdev->sb_size > PAGE_SIZE) { super_load()
863 ret = read_disk_sb(rdev, rdev->sb_size); super_load()
867 sb = page_address(rdev->sb_page); super_load()
875 (!test_bit(In_sync, &rdev->flags) && !rdev->recovery_offset)) { super_load()
876 super_sync(rdev->mddev, rdev); super_load()
878 set_bit(FirstUse, &rdev->flags); super_load()
881 set_bit(MD_CHANGE_DEVS, &rdev->mddev->flags); super_load()
898 static int super_init_validation(struct mddev *mddev, struct md_rdev *rdev) super_init_validation() argument
910 sb = page_address(rdev->sb_page); super_init_validation()
1028 static int super_validate(struct raid_set *rs, struct md_rdev *rdev) super_validate() argument
1031 struct dm_raid_superblock *sb = page_address(rdev->sb_page); super_validate()
1037 if (!mddev->events && super_init_validation(mddev, rdev)) super_validate()
1042 rdev->mddev->bitmap_info.default_offset = mddev->bitmap_info.offset; super_validate()
1044 if (!test_bit(FirstUse, &rdev->flags)) { super_validate()
1045 rdev->recovery_offset = le64_to_cpu(sb->disk_recovery_offset); super_validate()
1046 if (rdev->recovery_offset != MaxSector) super_validate()
1047 clear_bit(In_sync, &rdev->flags); super_validate()
1053 if (test_bit(Faulty, &rdev->flags)) { super_validate()
1054 clear_bit(Faulty, &rdev->flags); super_validate()
1055 clear_bit(In_sync, &rdev->flags); super_validate()
1056 rdev->saved_raid_disk = rdev->raid_disk; super_validate()
1057 rdev->recovery_offset = 0; super_validate()
1060 clear_bit(FirstUse, &rdev->flags); super_validate()
1072 struct md_rdev *rdev, *tmp, *freshest; analyse_superblocks() local
1076 rdev_for_each_safe(rdev, tmp, mddev) { rdev_for_each_safe()
1087 rdev->sectors = to_sector(i_size_read(rdev->bdev->bd_inode)); rdev_for_each_safe()
1092 if (!rdev->meta_bdev) rdev_for_each_safe()
1095 ret = super_load(rdev, freshest); rdev_for_each_safe()
1099 freshest = rdev; rdev_for_each_safe()
1104 dev = container_of(rdev, struct raid_dev, rdev); rdev_for_each_safe()
1109 rdev->meta_bdev = NULL; rdev_for_each_safe()
1111 if (rdev->sb_page) rdev_for_each_safe()
1112 put_page(rdev->sb_page); rdev_for_each_safe()
1114 rdev->sb_page = NULL; rdev_for_each_safe()
1116 rdev->sb_loaded = 0; rdev_for_each_safe()
1128 rdev->bdev = NULL; rdev_for_each_safe()
1130 list_del(&rdev->same_set); rdev_for_each_safe()
1150 rdev_for_each(rdev, mddev)
1151 if ((rdev != freshest) && super_validate(rs, rdev))
1175 if (!rs->dev[i].rdev.bdev) configure_discard_support()
1178 q = bdev_get_queue(rs->dev[i].rdev.bdev); configure_discard_support()
1403 if (!test_bit(In_sync, &rs->dev[i].rdev.flags)) raid_status()
1419 if (test_bit(Faulty, &rs->dev[i].rdev.flags)) raid_status()
1422 !test_bit(In_sync, &rs->dev[i].rdev.flags)) raid_status()
1462 !test_bit(In_sync, &rs->dev[i].rdev.flags)) raid_status()
1465 test_bit(WriteMostly, &rs->dev[i].rdev.flags)) raid_status()
1485 !test_bit(In_sync, &rs->dev[i].rdev.flags)) raid_status()
1500 test_bit(WriteMostly, &rs->dev[i].rdev.flags)) raid_status()
1647 r = &rs->dev[i].rdev; attempt_restore_of_faulty_devices()
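On the dm-raid.c superblock handling: analyse_superblocks() walks every rdev, loads its on-disk metadata, and keeps the device with the freshest superblock as the reference; per the comment at source line 847, super_load() returns 1 when the candidate should replace the current reference, 0 to keep the reference, negative on error. A reduced sketch of that selection loop (hypothetical wrapper name, cleanup of failed devices trimmed):

static struct md_rdev *pick_freshest(struct mddev *mddev)
{
	struct md_rdev *rdev, *tmp, *freshest = NULL;

	rdev_for_each_safe(rdev, tmp, mddev) {
		int ret = super_load(rdev, freshest);

		if (ret == 1)
			freshest = rdev;
		/* ret < 0: the device would be dropped from the set here */
	}
	return freshest;
}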
/linux-4.4.14/drivers/mfd/
H A Dretu-mfd.c143 int retu_read(struct retu_dev *rdev, u8 reg) retu_read() argument
148 mutex_lock(&rdev->mutex); retu_read()
149 ret = regmap_read(rdev->regmap, reg, &value); retu_read()
150 mutex_unlock(&rdev->mutex); retu_read()
156 int retu_write(struct retu_dev *rdev, u8 reg, u16 data) retu_write() argument
160 mutex_lock(&rdev->mutex); retu_write()
161 ret = regmap_write(rdev->regmap, reg, data); retu_write()
162 mutex_unlock(&rdev->mutex); retu_write()
170 struct retu_dev *rdev = retu_pm_power_off; retu_power_off() local
176 regmap_read(rdev->regmap, RETU_REG_CC1, &reg); retu_power_off()
177 regmap_write(rdev->regmap, RETU_REG_CC1, reg | 2); retu_power_off()
180 regmap_write(rdev->regmap, RETU_REG_WATCHDOG, 0); retu_power_off()
233 struct retu_dev *rdev; retu_probe() local
240 rdev = devm_kzalloc(&i2c->dev, sizeof(*rdev), GFP_KERNEL); retu_probe()
241 if (rdev == NULL) retu_probe()
244 i2c_set_clientdata(i2c, rdev); retu_probe()
245 rdev->dev = &i2c->dev; retu_probe()
246 mutex_init(&rdev->mutex); retu_probe()
247 rdev->regmap = devm_regmap_init(&i2c->dev, &retu_bus, &i2c->dev, retu_probe()
249 if (IS_ERR(rdev->regmap)) retu_probe()
250 return PTR_ERR(rdev->regmap); retu_probe()
252 ret = retu_read(rdev, RETU_REG_ASICR); retu_probe()
254 dev_err(rdev->dev, "could not read %s revision: %d\n", retu_probe()
259 dev_info(rdev->dev, "%s%s%s v%d.%d found\n", rdat->chip_name, retu_probe()
265 ret = retu_write(rdev, rdat->irq_chip->mask_base, 0xffff); retu_probe()
269 ret = regmap_add_irq_chip(rdev->regmap, i2c->irq, IRQF_ONESHOT, -1, retu_probe()
270 rdat->irq_chip, &rdev->irq_data); retu_probe()
274 ret = mfd_add_devices(rdev->dev, -1, rdat->children, rdat->nchildren, retu_probe()
275 NULL, regmap_irq_chip_get_base(rdev->irq_data), retu_probe()
278 regmap_del_irq_chip(i2c->irq, rdev->irq_data); retu_probe()
283 retu_pm_power_off = rdev; retu_probe()
292 struct retu_dev *rdev = i2c_get_clientdata(i2c); retu_remove() local
294 if (retu_pm_power_off == rdev) { retu_remove()
298 mfd_remove_devices(rdev->dev); retu_remove()
299 regmap_del_irq_chip(i2c->irq, rdev->irq_data); retu_remove()
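retu_read()/retu_write() above are thin, mutex-serialized wrappers over regmap; retu_read() returns the register value or a negative errno. A hypothetical caller doing a read-modify-write with them, in the same style as the CC1 poke in retu_power_off() (a sketch, not from the driver):

static int retu_set_bits(struct retu_dev *rdev, u8 reg, u16 bits)
{
	int old = retu_read(rdev, reg);

	if (old < 0)
		return old;	/* propagate the regmap error */
	return retu_write(rdev, reg, old | bits);
}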
/linux-4.4.14/drivers/media/rc/
H A Dir-hix5hd2.c68 struct rc_dev *rdev; member in struct:hix5hd2_ir_priv
120 static int hix5hd2_ir_open(struct rc_dev *rdev) hix5hd2_ir_open() argument
122 struct hix5hd2_ir_priv *priv = rdev->priv; hix5hd2_ir_open()
128 static void hix5hd2_ir_close(struct rc_dev *rdev) hix5hd2_ir_close() argument
130 struct hix5hd2_ir_priv *priv = rdev->priv; hix5hd2_ir_close()
149 ir_raw_event_reset(priv->rdev); hix5hd2_ir_rx_interrupt()
171 ir_raw_event_store(priv->rdev, &ev); hix5hd2_ir_rx_interrupt()
176 ir_raw_event_store(priv->rdev, &ev); hix5hd2_ir_rx_interrupt()
178 ir_raw_event_set_idle(priv->rdev, true); hix5hd2_ir_rx_interrupt()
189 ir_raw_event_handle(priv->rdev); hix5hd2_ir_rx_interrupt()
195 struct rc_dev *rdev; hix5hd2_ir_probe() local
225 rdev = rc_allocate_device(); hix5hd2_ir_probe()
226 if (!rdev) hix5hd2_ir_probe()
238 rdev->driver_type = RC_DRIVER_IR_RAW; hix5hd2_ir_probe()
239 rdev->allowed_protocols = RC_BIT_ALL; hix5hd2_ir_probe()
240 rdev->priv = priv; hix5hd2_ir_probe()
241 rdev->open = hix5hd2_ir_open; hix5hd2_ir_probe()
242 rdev->close = hix5hd2_ir_close; hix5hd2_ir_probe()
243 rdev->driver_name = IR_HIX5HD2_NAME; hix5hd2_ir_probe()
245 rdev->map_name = map_name ?: RC_MAP_EMPTY; hix5hd2_ir_probe()
246 rdev->input_name = IR_HIX5HD2_NAME; hix5hd2_ir_probe()
247 rdev->input_phys = IR_HIX5HD2_NAME "/input0"; hix5hd2_ir_probe()
248 rdev->input_id.bustype = BUS_HOST; hix5hd2_ir_probe()
249 rdev->input_id.vendor = 0x0001; hix5hd2_ir_probe()
250 rdev->input_id.product = 0x0001; hix5hd2_ir_probe()
251 rdev->input_id.version = 0x0100; hix5hd2_ir_probe()
252 rdev->rx_resolution = US_TO_NS(10); hix5hd2_ir_probe()
253 rdev->timeout = US_TO_NS(IR_CFG_SYMBOL_MAXWIDTH * 10); hix5hd2_ir_probe()
255 ret = rc_register_device(rdev); hix5hd2_ir_probe()
266 priv->rdev = rdev; hix5hd2_ir_probe()
273 rc_unregister_device(rdev); hix5hd2_ir_probe()
274 rdev = NULL; hix5hd2_ir_probe()
278 rc_free_device(rdev); hix5hd2_ir_probe()
288 rc_unregister_device(priv->rdev); hix5hd2_ir_remove()
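hix5hd2 above, and st_rc, ene_ir and streamzap below, all share the same raw-IR registration shape: allocate an rc_dev, fill in driver_type/allowed_protocols/priv plus the open/close callbacks, register it, and free it on failure. Condensed to its skeleton (hypothetical names, most fields and error codes trimmed):

static struct rc_dev *example_register_rc(void *priv)
{
	struct rc_dev *rdev = rc_allocate_device();

	if (!rdev)
		return NULL;

	rdev->driver_type = RC_DRIVER_IR_RAW;
	rdev->allowed_protocols = RC_BIT_ALL;
	rdev->priv = priv;
	rdev->map_name = RC_MAP_EMPTY;
	rdev->input_name = "Example Raw IR Receiver";

	if (rc_register_device(rdev)) {
		rc_free_device(rdev);
		return NULL;
	}
	return rdev;
}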
H A Dst_rc.c27 struct rc_dev *rdev; member in struct:st_rc_device
67 static void st_rc_send_lirc_timeout(struct rc_dev *rdev) st_rc_send_lirc_timeout() argument
71 ir_raw_event_store(rdev, &ev); st_rc_send_lirc_timeout()
113 ir_raw_event_reset(dev->rdev); st_rc_rx_interrupt()
138 ir_raw_event_store(dev->rdev, &ev); st_rc_rx_interrupt()
143 ir_raw_event_store(dev->rdev, &ev); st_rc_rx_interrupt()
145 st_rc_send_lirc_timeout(dev->rdev); st_rc_rx_interrupt()
156 ir_raw_event_handle(dev->rdev); st_rc_rx_interrupt()
194 rc_unregister_device(rc_dev->rdev); st_rc_remove()
198 static int st_rc_open(struct rc_dev *rdev) st_rc_open() argument
200 struct st_rc_device *dev = rdev->priv; st_rc_open()
211 static void st_rc_close(struct rc_dev *rdev) st_rc_close() argument
213 struct st_rc_device *dev = rdev->priv; st_rc_close()
222 struct rc_dev *rdev; st_rc_probe() local
234 rdev = rc_allocate_device(); st_rc_probe()
236 if (!rdev) st_rc_probe()
289 rdev->driver_type = RC_DRIVER_IR_RAW; st_rc_probe()
290 rdev->allowed_protocols = RC_BIT_ALL; st_rc_probe()
292 rdev->rx_resolution = 100; st_rc_probe()
293 rdev->timeout = US_TO_NS(MAX_SYMB_TIME); st_rc_probe()
294 rdev->priv = rc_dev; st_rc_probe()
295 rdev->open = st_rc_open; st_rc_probe()
296 rdev->close = st_rc_close; st_rc_probe()
297 rdev->driver_name = IR_ST_NAME; st_rc_probe()
298 rdev->map_name = RC_MAP_LIRC; st_rc_probe()
299 rdev->input_name = "ST Remote Control Receiver"; st_rc_probe()
305 ret = rc_register_device(rdev); st_rc_probe()
309 rc_dev->rdev = rdev; st_rc_probe()
321 st_rc_send_lirc_timeout(rdev); st_rc_probe()
327 rc_unregister_device(rdev); st_rc_probe()
328 rdev = NULL; st_rc_probe()
332 rc_free_device(rdev); st_rc_probe()
362 struct rc_dev *rdev = rc_dev->rdev; st_rc_resume() local
370 if (rdev->users) { st_rc_resume()
H A Dene_ir.c359 ir_raw_event_store(dev->rdev, &ev); ene_rx_sense_carrier()
450 dev->rdev->rx_resolution = US_TO_NS(ENE_FW_SAMPLE_PERIOD_FAN); ene_rx_setup()
454 dev->rdev->min_timeout = dev->rdev->max_timeout = ene_rx_setup()
458 dev->rdev->rx_resolution = US_TO_NS(sample_period); ene_rx_setup()
465 dev->rdev->min_timeout = US_TO_NS(127 * sample_period); ene_rx_setup()
466 dev->rdev->max_timeout = US_TO_NS(200000); ene_rx_setup()
470 dev->rdev->tx_resolution = US_TO_NS(sample_period); ene_rx_setup()
472 if (dev->rdev->timeout > dev->rdev->max_timeout) ene_rx_setup()
473 dev->rdev->timeout = dev->rdev->max_timeout; ene_rx_setup()
474 if (dev->rdev->timeout < dev->rdev->min_timeout) ene_rx_setup()
475 dev->rdev->timeout = dev->rdev->min_timeout; ene_rx_setup()
506 ir_raw_event_set_idle(dev->rdev, true); ene_rx_enable_hw()
525 ir_raw_event_set_idle(dev->rdev, true); ene_rx_disable_hw()
818 ir_raw_event_store_with_filter(dev->rdev, &ev); ene_isr()
821 ir_raw_event_handle(dev->rdev); ene_isr()
836 dev->rdev->timeout = US_TO_NS(150000); ene_setup_default_settings()
851 static int ene_open(struct rc_dev *rdev) ene_open() argument
853 struct ene_device *dev = rdev->priv; ene_open()
863 static void ene_close(struct rc_dev *rdev) ene_close() argument
865 struct ene_device *dev = rdev->priv; ene_close()
874 static int ene_set_tx_mask(struct rc_dev *rdev, u32 tx_mask) ene_set_tx_mask() argument
876 struct ene_device *dev = rdev->priv; ene_set_tx_mask()
892 static int ene_set_tx_carrier(struct rc_dev *rdev, u32 carrier) ene_set_tx_carrier() argument
894 struct ene_device *dev = rdev->priv; ene_set_tx_carrier()
916 static int ene_set_tx_duty_cycle(struct rc_dev *rdev, u32 duty_cycle) ene_set_tx_duty_cycle() argument
918 struct ene_device *dev = rdev->priv; ene_set_tx_duty_cycle()
926 static int ene_set_learning_mode(struct rc_dev *rdev, int enable) ene_set_learning_mode() argument
928 struct ene_device *dev = rdev->priv; ene_set_learning_mode()
942 static int ene_set_carrier_report(struct rc_dev *rdev, int enable) ene_set_carrier_report() argument
944 struct ene_device *dev = rdev->priv; ene_set_carrier_report()
960 static void ene_set_idle(struct rc_dev *rdev, bool idle) ene_set_idle() argument
962 struct ene_device *dev = rdev->priv; ene_set_idle()
971 static int ene_transmit(struct rc_dev *rdev, unsigned *buf, unsigned n) ene_transmit() argument
973 struct ene_device *dev = rdev->priv; ene_transmit()
1010 struct rc_dev *rdev; ene_probe() local
1015 rdev = rc_allocate_device(); ene_probe()
1016 if (!dev || !rdev) ene_probe()
1061 rdev->driver_type = RC_DRIVER_IR_RAW; ene_probe()
1062 rdev->allowed_protocols = RC_BIT_ALL; ene_probe()
1063 rdev->priv = dev; ene_probe()
1064 rdev->open = ene_open; ene_probe()
1065 rdev->close = ene_close; ene_probe()
1066 rdev->s_idle = ene_set_idle; ene_probe()
1067 rdev->driver_name = ENE_DRIVER_NAME; ene_probe()
1068 rdev->map_name = RC_MAP_RC6_MCE; ene_probe()
1069 rdev->input_name = "ENE eHome Infrared Remote Receiver"; ene_probe()
1072 rdev->s_learning_mode = ene_set_learning_mode; ene_probe()
1074 rdev->tx_ir = ene_transmit; ene_probe()
1075 rdev->s_tx_mask = ene_set_tx_mask; ene_probe()
1076 rdev->s_tx_carrier = ene_set_tx_carrier; ene_probe()
1077 rdev->s_tx_duty_cycle = ene_set_tx_duty_cycle; ene_probe()
1078 rdev->s_carrier_report = ene_set_carrier_report; ene_probe()
1079 rdev->input_name = "ENE eHome Infrared Remote Transceiver"; ene_probe()
1082 dev->rdev = rdev; ene_probe()
1091 error = rc_register_device(rdev); ene_probe()
1112 rc_unregister_device(rdev); ene_probe()
1113 rdev = NULL; ene_probe()
1115 rc_free_device(rdev); ene_probe()
1133 rc_unregister_device(dev->rdev); ene_remove()
H A Dstreamzap.c76 struct rc_dev *rdev; member in struct:streamzap_ir
130 ir_raw_event_store_with_filter(sz->rdev, &rawir); sz_push()
260 rawir.duration = sz->rdev->timeout; streamzap_callback()
264 ir_raw_event_handle(sz->rdev); streamzap_callback()
265 ir_raw_event_reset(sz->rdev); streamzap_callback()
283 ir_raw_event_handle(sz->rdev); streamzap_callback()
291 struct rc_dev *rdev; streamzap_init_rc_dev() local
295 rdev = rc_allocate_device(); streamzap_init_rc_dev()
296 if (!rdev) { streamzap_init_rc_dev()
308 rdev->input_name = sz->name; streamzap_init_rc_dev()
309 rdev->input_phys = sz->phys; streamzap_init_rc_dev()
310 usb_to_input_id(sz->usbdev, &rdev->input_id); streamzap_init_rc_dev()
311 rdev->dev.parent = dev; streamzap_init_rc_dev()
312 rdev->priv = sz; streamzap_init_rc_dev()
313 rdev->driver_type = RC_DRIVER_IR_RAW; streamzap_init_rc_dev()
314 rdev->allowed_protocols = RC_BIT_ALL; streamzap_init_rc_dev()
315 rdev->driver_name = DRIVER_NAME; streamzap_init_rc_dev()
316 rdev->map_name = RC_MAP_STREAMZAP; streamzap_init_rc_dev()
318 ret = rc_register_device(rdev); streamzap_init_rc_dev()
324 return rdev; streamzap_init_rc_dev()
327 rc_free_device(rdev); streamzap_init_rc_dev()
414 sz->rdev = streamzap_init_rc_dev(sz); streamzap_probe()
415 if (!sz->rdev) streamzap_probe()
422 sz->rdev->timeout = ((US_TO_NS(SZ_TIMEOUT * SZ_RESOLUTION) & streamzap_probe()
481 rc_unregister_device(sz->rdev); streamzap_disconnect()
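The interrupt paths in these drivers feed the generic raw decoder the same way: build an ir_raw_event per mark or space, store it against the rc_dev, and kick the decoder thread once a burst ends. A sketch of that flow (durations in nanoseconds; idle handling and filtering omitted):

static void push_ir_sample(struct rc_dev *rdev, bool pulse, u32 duration_ns)
{
	DEFINE_IR_RAW_EVENT(ev);

	ev.pulse = pulse;
	ev.duration = duration_ns;
	ir_raw_event_store(rdev, &ev);
	ir_raw_event_handle(rdev);	/* wake the decoder thread */
}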
/linux-4.4.14/drivers/media/rc/img-ir/
H A Dimg-ir-raw.c25 struct rc_dev *rc_dev = priv->raw.rdev; img_ir_refresh_raw()
55 if (!raw->rdev) img_ir_isr_raw()
77 if (priv->raw.rdev) img_ir_echo_timer()
91 if (!priv->raw.rdev) img_ir_setup_raw()
106 struct rc_dev *rdev; img_ir_probe_raw() local
113 raw->rdev = rdev = rc_allocate_device(); img_ir_probe_raw()
114 if (!rdev) { img_ir_probe_raw()
118 rdev->priv = priv; img_ir_probe_raw()
119 rdev->map_name = RC_MAP_EMPTY; img_ir_probe_raw()
120 rdev->input_name = "IMG Infrared Decoder Raw"; img_ir_probe_raw()
121 rdev->driver_type = RC_DRIVER_IR_RAW; img_ir_probe_raw()
124 error = rc_register_device(rdev); img_ir_probe_raw()
127 rc_free_device(rdev); img_ir_probe_raw()
128 raw->rdev = NULL; img_ir_probe_raw()
138 struct rc_dev *rdev = raw->rdev; img_ir_remove_raw() local
141 if (!rdev) img_ir_remove_raw()
146 raw->rdev = NULL; img_ir_remove_raw()
153 rc_unregister_device(rdev); img_ir_remove_raw()
/linux-4.4.14/include/linux/
H A Dsunxi-rsb.h41 static inline void *sunxi_rsb_device_get_drvdata(const struct sunxi_rsb_device *rdev) sunxi_rsb_device_get_drvdata() argument
43 return dev_get_drvdata(&rdev->dev); sunxi_rsb_device_get_drvdata()
46 static inline void sunxi_rsb_device_set_drvdata(struct sunxi_rsb_device *rdev, sunxi_rsb_device_set_drvdata() argument
49 dev_set_drvdata(&rdev->dev, data); sunxi_rsb_device_set_drvdata()
61 int (*probe)(struct sunxi_rsb_device *rdev);
62 int (*remove)(struct sunxi_rsb_device *rdev);
86 struct regmap *__devm_regmap_init_sunxi_rsb(struct sunxi_rsb_device *rdev,
94 * @rdev: Device that will be interacted with
101 #define devm_regmap_init_sunxi_rsb(rdev, config) \
103 rdev, config)
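Putting the sunxi-rsb.h helpers together, a slave driver's probe would typically build a regmap over the RSB link and stash its state with the drvdata accessors. A hypothetical example; the chip struct and the 8-bit register layout are assumptions, not from the header:

struct my_chip {
	struct regmap *regmap;
};

static int my_rsb_probe(struct sunxi_rsb_device *rdev)
{
	static const struct regmap_config cfg = {
		.reg_bits = 8,
		.val_bits = 8,
	};
	struct my_chip *chip;

	chip = devm_kzalloc(&rdev->dev, sizeof(*chip), GFP_KERNEL);
	if (!chip)
		return -ENOMEM;

	chip->regmap = devm_regmap_init_sunxi_rsb(rdev, &cfg);
	if (IS_ERR(chip->regmap))
		return PTR_ERR(chip->regmap);

	sunxi_rsb_device_set_drvdata(rdev, chip);
	return 0;
}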
H A Drio_drv.h142 * @rdev: RIO device
149 static inline int rio_read_config_32(struct rio_dev *rdev, u32 offset, rio_read_config_32() argument
152 return rio_mport_read_config_32(rdev->net->hport, rdev->destid, rio_read_config_32()
153 rdev->hopcount, offset, data); rio_read_config_32()
158 * @rdev: RIO device
165 static inline int rio_write_config_32(struct rio_dev *rdev, u32 offset, rio_write_config_32() argument
168 return rio_mport_write_config_32(rdev->net->hport, rdev->destid, rio_write_config_32()
169 rdev->hopcount, offset, data); rio_write_config_32()
174 * @rdev: RIO device
181 static inline int rio_read_config_16(struct rio_dev *rdev, u32 offset, rio_read_config_16() argument
184 return rio_mport_read_config_16(rdev->net->hport, rdev->destid, rio_read_config_16()
185 rdev->hopcount, offset, data); rio_read_config_16()
190 * @rdev: RIO device
197 static inline int rio_write_config_16(struct rio_dev *rdev, u32 offset, rio_write_config_16() argument
200 return rio_mport_write_config_16(rdev->net->hport, rdev->destid, rio_write_config_16()
201 rdev->hopcount, offset, data); rio_write_config_16()
206 * @rdev: RIO device
213 static inline int rio_read_config_8(struct rio_dev *rdev, u32 offset, u8 * data) rio_read_config_8() argument
215 return rio_mport_read_config_8(rdev->net->hport, rdev->destid, rio_read_config_8()
216 rdev->hopcount, offset, data); rio_read_config_8()
221 * @rdev: RIO device
228 static inline int rio_write_config_8(struct rio_dev *rdev, u32 offset, u8 data) rio_write_config_8() argument
230 return rio_mport_write_config_8(rdev->net->hport, rdev->destid, rio_write_config_8()
231 rdev->hopcount, offset, data); rio_write_config_8()
239 * @rdev: RIO device
245 static inline int rio_send_doorbell(struct rio_dev *rdev, u16 data) rio_send_doorbell() argument
247 return rio_mport_send_doorbell(rdev->net->hport, rdev->destid, data); rio_send_doorbell()
307 * @rdev: RIO device the message is be sent to
316 struct rio_dev *rdev, int mbox, rio_add_outb_message()
319 return mport->ops->add_outb_message(mport, rdev, mbox, rio_add_outb_message()
386 extern struct dma_chan *rio_request_dma(struct rio_dev *rdev);
390 struct rio_dev *rdev, struct dma_chan *dchan,
401 * @rdev: RIO device
406 static inline const char *rio_name(struct rio_dev *rdev) rio_name() argument
408 return dev_name(&rdev->dev); rio_name()
413 * @rdev: RIO device
418 static inline void *rio_get_drvdata(struct rio_dev *rdev) rio_get_drvdata() argument
420 return dev_get_drvdata(&rdev->dev); rio_get_drvdata()
425 * @rdev: RIO device
431 static inline void rio_set_drvdata(struct rio_dev *rdev, void *data) rio_set_drvdata() argument
433 dev_set_drvdata(&rdev->dev, data); rio_set_drvdata()
315 rio_add_outb_message(struct rio_mport *mport, struct rio_dev *rdev, int mbox, void *buffer, size_t len) rio_add_outb_message() argument
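All of the rio_*_config_* helpers above expand the same way: they pull the host port, destid and hopcount out of the rio_dev and forward to the matching rio_mport_* accessor. A hypothetical caller reading the standard device-identity CAR at offset RIO_DEV_ID_CAR:

static int rio_show_device_id(struct rio_dev *rdev)
{
	u32 result;
	int rc = rio_read_config_32(rdev, RIO_DEV_ID_CAR, &result);

	if (rc)
		return rc;

	pr_info("%s: device/vendor id 0x%08x\n", rio_name(rdev), result);
	return 0;
}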
/linux-4.4.14/sound/drivers/pcsp/
H A Dpcsp_input.h10 int pcspkr_input_init(struct input_dev **rdev, struct device *dev);
/linux-4.4.14/drivers/infiniband/hw/cxgb3/
H A Dcxio_dbg.c40 void cxio_dump_tpt(struct cxio_rdev *rdev, u32 stag) cxio_dump_tpt() argument
53 m->addr = (stag>>8) * 32 + rdev->rnic_info.tpt_base; cxio_dump_tpt()
56 rc = rdev->t3cdev_p->ctl(rdev->t3cdev_p, RDMA_GET_MEM, m); cxio_dump_tpt()
73 void cxio_dump_pbl(struct cxio_rdev *rdev, u32 pbl_addr, uint len, u8 shift) cxio_dump_pbl() argument
94 rc = rdev->t3cdev_p->ctl(rdev->t3cdev_p, RDMA_GET_MEM, m); cxio_dump_pbl()
139 void cxio_dump_rqt(struct cxio_rdev *rdev, u32 hwtid, int nents) cxio_dump_rqt() argument
152 m->addr = ((hwtid)<<10) + rdev->rnic_info.rqt_base; cxio_dump_rqt()
155 rc = rdev->t3cdev_p->ctl(rdev->t3cdev_p, RDMA_GET_MEM, m); cxio_dump_rqt()
172 void cxio_dump_tcb(struct cxio_rdev *rdev, u32 hwtid) cxio_dump_tcb() argument
188 rc = rdev->t3cdev_p->ctl(rdev->t3cdev_p, RDMA_GET_MEM, m); cxio_dump_tcb()
H A Dcxio_hal.h156 int cxio_rdev_open(struct cxio_rdev *rdev);
157 void cxio_rdev_close(struct cxio_rdev *rdev);
158 int cxio_hal_cq_op(struct cxio_rdev *rdev, struct t3_cq *cq,
160 int cxio_create_cq(struct cxio_rdev *rdev, struct t3_cq *cq, int kernel);
161 int cxio_destroy_cq(struct cxio_rdev *rdev, struct t3_cq *cq);
162 int cxio_resize_cq(struct cxio_rdev *rdev, struct t3_cq *cq);
163 void cxio_release_ucontext(struct cxio_rdev *rdev, struct cxio_ucontext *uctx);
164 void cxio_init_ucontext(struct cxio_rdev *rdev, struct cxio_ucontext *uctx);
165 int cxio_create_qp(struct cxio_rdev *rdev, u32 kernel_domain, struct t3_wq *wq,
167 int cxio_destroy_qp(struct cxio_rdev *rdev, struct t3_wq *wq,
172 int cxio_register_phys_mem(struct cxio_rdev *rdev, u32 * stag, u32 pdid,
175 int cxio_reregister_phys_mem(struct cxio_rdev *rdev, u32 * stag, u32 pdid,
178 int cxio_dereg_mem(struct cxio_rdev *rdev, u32 stag, u32 pbl_size,
180 int cxio_allocate_window(struct cxio_rdev *rdev, u32 * stag, u32 pdid);
181 int cxio_allocate_stag(struct cxio_rdev *rdev, u32 *stag, u32 pdid, u32 pbl_size, u32 pbl_addr);
182 int cxio_deallocate_window(struct cxio_rdev *rdev, u32 stag);
183 int cxio_rdma_init(struct cxio_rdev *rdev, struct t3_rdma_init_attr *attr);
207 void cxio_dump_rqt(struct cxio_rdev *rdev, u32 hwtid, int nents);
208 void cxio_dump_tcb(struct cxio_rdev *rdev, u32 hwtid);
iwch.c
79		ring_doorbell(qhp->rhp->rdev.ctrl_qp.doorbell, qhp->wq.qpid);	/* in enable_qp_db() */
121		rnicp->attr.max_mem_regs = cxio_num_stags(&rnicp->rdev);	/* in rnic_init() */
156		rnicp->rdev.ulp = rnicp;					/* in open_rnic_dev() */
157		rnicp->rdev.t3cdev_p = tdev;
161		if (cxio_rdev_open(&rnicp->rdev)) {
163			printk(KERN_ERR MOD "Unable to open CXIO rdev\n");
178			pci_name(rnicp->rdev.rnic_info.pdev));
188		if (dev->rdev.t3cdev_p == tdev) {				/* in close_rnic_dev() */
189			dev->rdev.flags = CXIO_ERROR_FATAL;
194		cxio_rdev_close(&dev->rdev);
207		struct cxio_rdev *rdev = tdev->ulp;				/* in iwch_event_handler() */
213		if (!rdev)
215		rnicp = rdev_to_iwch_dev(rdev);
218		rdev->flags = CXIO_ERROR_FATAL;
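Taken together, the iwch.c lines show the rdev lifecycle: the driver fills in the ulp back-pointer and the underlying t3cdev before cxio_rdev_open(), and marks the rdev fatal before closing it. A minimal sketch of that pairing, assuming only the cxio_hal.h prototypes listed above and the driver-local iwch.h types; both helper names here are hypothetical:

#include <linux/errno.h>
#include "iwch.h"	/* struct iwch_dev, which embeds struct cxio_rdev */

static int example_open_rdev(struct iwch_dev *rnicp, struct t3cdev *tdev)
{
	rnicp->rdev.ulp = rnicp;	/* lets iwch_event_handler() find us */
	rnicp->rdev.t3cdev_p = tdev;	/* underlying cxgb3 device */

	if (cxio_rdev_open(&rnicp->rdev))
		return -ENODEV;		/* the "Unable to open CXIO rdev" path */
	return 0;
}

static void example_close_rdev(struct iwch_dev *rnicp)
{
	rnicp->rdev.flags = CXIO_ERROR_FATAL;	/* block further hardware access */
	cxio_rdev_close(&rnicp->rdev);
}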
/linux-4.4.14/drivers/bus/
sunxi-rsb.c
142		struct sunxi_rsb_device *rdev = to_sunxi_rsb_device(dev);	/* in sunxi_rsb_device_probe() */
148		if (!rdev->irq) {
159		rdev->irq = irq;
166		return drv->probe(rdev);
185		struct sunxi_rsb_device *rdev = to_sunxi_rsb_device(dev);	/* in sunxi_rsb_dev_release() */
187		kfree(rdev);
201		struct sunxi_rsb_device *rdev;					/* in sunxi_rsb_device_create() */
203		rdev = kzalloc(sizeof(*rdev), GFP_KERNEL);
204		if (!rdev)
207		rdev->rsb = rsb;
208		rdev->hwaddr = hwaddr;
209		rdev->rtaddr = rtaddr;
210		rdev->dev.bus = &sunxi_rsb_bus;
211		rdev->dev.parent = rsb->dev;
212		rdev->dev.of_node = node;
213		rdev->dev.release = sunxi_rsb_dev_release;
215		dev_set_name(&rdev->dev, "%s-%x", RSB_CTRL_NAME, hwaddr);
217		err = device_register(&rdev->dev);
219			dev_err(&rdev->dev, "Can't add %s, status %d\n",
220				dev_name(&rdev->dev), err);
224		dev_dbg(&rdev->dev, "device %s registered\n", dev_name(&rdev->dev));
227		put_device(&rdev->dev);
234  * @rdev: rsb_device to be removed
236 static void sunxi_rsb_device_unregister(struct sunxi_rsb_device *rdev)
238		device_unregister(&rdev->dev);
243		struct sunxi_rsb_device *rdev = to_sunxi_rsb_device(dev);	/* in sunxi_rsb_remove_devices() */
246		sunxi_rsb_device_unregister(rdev);
394		struct sunxi_rsb_device *rdev;		/* member of struct sunxi_rsb_ctx */
402		struct sunxi_rsb_device *rdev = ctx->rdev;			/* in regmap_sunxi_rsb_reg_read() */
407		return sunxi_rsb_read(rdev->rsb, rdev->rtaddr, reg, val, ctx->size);
414		struct sunxi_rsb_device *rdev = ctx->rdev;			/* in regmap_sunxi_rsb_reg_write() */
416		return sunxi_rsb_write(rdev->rsb, rdev->rtaddr, reg, &val, ctx->size);
434 static struct sunxi_rsb_ctx *regmap_sunxi_rsb_init_ctx(struct sunxi_rsb_device *rdev,
452		ctx->rdev = rdev;
458 struct regmap *__devm_regmap_init_sunxi_rsb(struct sunxi_rsb_device *rdev,
463		struct sunxi_rsb_ctx *ctx = regmap_sunxi_rsb_init_ctx(rdev, config);
468		return __devm_regmap_init(&rdev->dev, &regmap_sunxi_rsb, ctx, config,
595		struct sunxi_rsb_device *rdev;		/* in the for_each_available_child_of_node() loop */
607		rdev = sunxi_rsb_device_create(rsb, child, hwaddr, rtaddr);
608		if (IS_ERR(rdev))
610			child->full_name, PTR_ERR(rdev));
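The last few lines show the bus wrapping sunxi_rsb_read()/sunxi_rsb_write() in a regmap. From a client driver's side that machinery is reached through devm_regmap_init_sunxi_rsb(); a hedged sketch of such a client, with an invented compatible string, driver name, and register layout (the chip-ID read at 0x00 is purely illustrative):

#include <linux/module.h>
#include <linux/of.h>
#include <linux/regmap.h>
#include <linux/sunxi-rsb.h>

/* Hypothetical register map for an imaginary 8-bit RSB slave. */
static const struct regmap_config example_rsb_regmap_config = {
	.reg_bits = 8,
	.val_bits = 8,
	.max_register = 0xff,
};

static int example_rsb_probe(struct sunxi_rsb_device *rdev)
{
	struct regmap *regmap;
	unsigned int val;
	int ret;

	regmap = devm_regmap_init_sunxi_rsb(rdev, &example_rsb_regmap_config);
	if (IS_ERR(regmap))
		return PTR_ERR(regmap);

	ret = regmap_read(regmap, 0x00, &val);	/* e.g. a chip-ID register */
	if (ret)
		return ret;

	dev_info(&rdev->dev, "example RSB slave, id reg = 0x%x\n", val);
	return 0;
}

static const struct of_device_id example_rsb_of_match[] = {
	{ .compatible = "vendor,example-rsb-slave" },	/* hypothetical */
	{ }
};
MODULE_DEVICE_TABLE(of, example_rsb_of_match);

static struct sunxi_rsb_driver example_rsb_driver = {
	.driver = {
		.name = "example-rsb",
		.of_match_table = example_rsb_of_match,
	},
	.probe = example_rsb_probe,
};
module_sunxi_rsb_driver(example_rsb_driver);

MODULE_LICENSE("GPL");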
/linux-4.4.14/drivers/crypto/caam/
jr.h
12 void caam_jr_free(struct device *rdev);
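The free routine pairs with an allocator declared on the same interface. A minimal sketch of that pairing, assuming caam_jr_alloc() as declared alongside caam_jr_free() in jr.h; the helper name and usage are illustrative:

#include <linux/err.h>
#include "jr.h"		/* caam_jr_alloc() / caam_jr_free() */

/* Hypothetical helper: grab a job ring, use it, release it. */
static int example_with_job_ring(void)
{
	struct device *jrdev = caam_jr_alloc();	/* picks a job ring */

	if (IS_ERR(jrdev))
		return PTR_ERR(jrdev);

	/* ... submit descriptors to jrdev via caam_jr_enqueue() ... */

	caam_jr_free(jrdev);
	return 0;
}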
/linux-4.4.14/drivers/net/
rionet.c
67		struct rio_dev *rdev;		/* member of struct rionet_peer */
144 rionet_queue_tx_msg(struct sk_buff *skb, struct net_device *ndev,
145			struct rio_dev *rdev)
149		rio_add_outb_message(rnet->mport, rdev, 0, skb->data, skb->len);
243		if (peer->rdev->destid == sid) {				/* in rionet_dbell_event() */
245			peer->rdev;
350		if (!(peer->res = rio_request_outb_dbell(peer->rdev,		/* in rionet_open() */
360		rio_send_doorbell(peer->rdev, RIONET_DOORBELL_JOIN);
384		if (nets[rnet->mport->id].active[peer->rdev->destid]) {	/* in rionet_close() */
385			rio_send_doorbell(peer->rdev, RIONET_DOORBELL_LEAVE);
386			nets[rnet->mport->id].active[peer->rdev->destid] = NULL;
388		rio_release_outb_dbell(peer->rdev, peer->res);
401		struct rio_dev *rdev = to_rio_dev(dev);				/* in rionet_remove_dev() */
402		unsigned char netid = rdev->net->hport->id;
405		if (dev_rionet_capable(rdev)) {
407		if (peer->rdev == rdev) {
408		if (nets[netid].active[rdev->destid]) {
409			nets[netid].active[rdev->destid] = NULL;
526		struct rio_dev *rdev = to_rio_dev(dev);				/* in rionet_add_dev() */
527		unsigned char netid = rdev->net->hport->id;
541		rio_local_read_config_32(rdev->net->hport, RIO_SRC_OPS_CAR,
543		rio_local_read_config_32(rdev->net->hport, RIO_DST_OPS_CAR,
548			DRV_NAME, rdev->net->hport->name);
559		rc = rionet_setup_netdev(rdev->net->hport, ndev);
575		if (dev_rionet_capable(rdev)) {
580		peer->rdev = rdev;
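rionet_open()/rionet_close() above illustrate the outbound-doorbell lifecycle: reserve a doorbell range on the peer, ring it to announce yourself, and ring again before releasing the range on teardown. A minimal sketch of the same request/send/release pairing; the EXAMPLE_DBELL_* values and both function names are invented for illustration (rionet itself uses its RIONET_DOORBELL_* range):

#include <linux/errno.h>
#include <linux/rio.h>
#include <linux/rio_drv.h>

#define EXAMPLE_DBELL_HELLO	0x1000	/* hypothetical doorbell values */
#define EXAMPLE_DBELL_BYE	0x1001

static struct resource *example_dbell_res;

static int example_greet_peer(struct rio_dev *rdev)
{
	/* Reserve an outbound doorbell range on the peer device... */
	example_dbell_res = rio_request_outb_dbell(rdev, EXAMPLE_DBELL_HELLO,
						   EXAMPLE_DBELL_BYE);
	if (!example_dbell_res)
		return -ENOMEM;

	/* ...then ring one of the reserved doorbells. */
	return rio_send_doorbell(rdev, EXAMPLE_DBELL_HELLO);
}

static void example_leave_peer(struct rio_dev *rdev)
{
	rio_send_doorbell(rdev, EXAMPLE_DBELL_BYE);
	rio_release_outb_dbell(rdev, example_dbell_res);
}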
/linux-4.4.14/drivers/watchdog/
retu_wdt.c
32		struct retu_dev *rdev;		/* member of struct retu_wdt_dev */
44		retu_write(wdev->rdev, RETU_REG_WATCHDOG, RETU_WDT_MAX_TIMER);	/* in retu_wdt_ping_enable() */
51		retu_write(wdev->rdev, RETU_REG_WATCHDOG, RETU_WDT_MAX_TIMER);	/* in retu_wdt_ping_disable() */
68		return retu_write(wdev->rdev, RETU_REG_WATCHDOG, wdog->timeout);	/* in retu_wdt_start() */
84		return retu_write(wdev->rdev, RETU_REG_WATCHDOG, wdog->timeout);	/* in retu_wdt_ping() */
93		return retu_write(wdev->rdev, RETU_REG_WATCHDOG, wdog->timeout);	/* in retu_wdt_set_timeout() */
111		struct retu_dev *rdev = dev_get_drvdata(pdev->dev.parent);	/* in retu_wdt_probe() */
135		wdev->rdev = rdev;
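Every operation in this driver reduces to the same register write: start, ping, and set_timeout all reload the hardware countdown by writing a timeout into RETU_REG_WATCHDOG. A minimal sketch of that kick, assuming the retu_write() MFD accessor the excerpt uses; the helper name is hypothetical and the timeout unit (seconds) follows the driver's own usage:

#include <linux/mfd/retu.h>

/* Kick the Retu watchdog: writing the timeout restarts the countdown,
 * so pinging and (re)setting the timeout are the same operation. */
static int example_retu_wdt_kick(struct retu_dev *rdev, u16 timeout)
{
	return retu_write(rdev, RETU_REG_WATCHDOG, timeout);
}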
