__smem            343 drivers/soc/qcom/smem.c static struct qcom_smem *__smem;
__smem            447 drivers/soc/qcom/smem.c 	if (!__smem)
__smem            451 drivers/soc/qcom/smem.c 		dev_err(__smem->dev,
__smem            456 drivers/soc/qcom/smem.c 	if (WARN_ON(item >= __smem->item_count))
__smem            459 drivers/soc/qcom/smem.c 	ret = hwspin_lock_timeout_irqsave(__smem->hwlock,
__smem            465 drivers/soc/qcom/smem.c 	if (host < SMEM_HOST_COUNT && __smem->partitions[host]) {
__smem            466 drivers/soc/qcom/smem.c 		phdr = __smem->partitions[host];
__smem            467 drivers/soc/qcom/smem.c 		ret = qcom_smem_alloc_private(__smem, phdr, item, size);
__smem            468 drivers/soc/qcom/smem.c 	} else if (__smem->global_partition) {
__smem            469 drivers/soc/qcom/smem.c 		phdr = __smem->global_partition;
__smem            470 drivers/soc/qcom/smem.c 		ret = qcom_smem_alloc_private(__smem, phdr, item, size);
__smem            472 drivers/soc/qcom/smem.c 		ret = qcom_smem_alloc_global(__smem, item, size);
__smem            475 drivers/soc/qcom/smem.c 	hwspin_unlock_irqrestore(__smem->hwlock, &flags);
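The cluster at kernel lines 447-475 is the body of qcom_smem_alloc(): after the probe-defer and item-range checks it takes the hardware spinlock and routes the request to the caller's private partition, the global partition, or the legacy global heap. A minimal sketch of that dispatch, reconstructed from the lines above; the local declarations, the dev_err() wording and the timeout constant are assumptions:

int qcom_smem_alloc(unsigned host, unsigned item, size_t size)
{
        struct smem_partition_header *phdr;
        unsigned long flags;
        int ret;

        if (!__smem)
                return -EPROBE_DEFER;           /* line 447: not probed yet */

        if (item < SMEM_ITEM_LAST_FIXED) {
                /* line 451: statically assigned items cannot be reallocated */
                dev_err(__smem->dev, "rejecting allocation of static entry %d\n",
                        item);
                return -EINVAL;
        }

        if (WARN_ON(item >= __smem->item_count))
                return -EINVAL;

        ret = hwspin_lock_timeout_irqsave(__smem->hwlock,
                                          HWSPINLOCK_TIMEOUT, &flags);
        if (ret)
                return ret;

        if (host < SMEM_HOST_COUNT && __smem->partitions[host]) {
                phdr = __smem->partitions[host];        /* host-private partition */
                ret = qcom_smem_alloc_private(__smem, phdr, item, size);
        } else if (__smem->global_partition) {
                phdr = __smem->global_partition;        /* global partition */
                ret = qcom_smem_alloc_private(__smem, phdr, item, size);
        } else {
                ret = qcom_smem_alloc_global(__smem, item, size);  /* legacy heap */
        }

        hwspin_unlock_irqrestore(__smem->hwlock, &flags);

        return ret;
}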
__smem            583 drivers/soc/qcom/smem.c 	if (!__smem)
__smem            586 drivers/soc/qcom/smem.c 	if (WARN_ON(item >= __smem->item_count))
__smem            589 drivers/soc/qcom/smem.c 	ret = hwspin_lock_timeout_irqsave(__smem->hwlock,
__smem            595 drivers/soc/qcom/smem.c 	if (host < SMEM_HOST_COUNT && __smem->partitions[host]) {
__smem            596 drivers/soc/qcom/smem.c 		phdr = __smem->partitions[host];
__smem            597 drivers/soc/qcom/smem.c 		cacheln = __smem->cacheline[host];
__smem            598 drivers/soc/qcom/smem.c 		ptr = qcom_smem_get_private(__smem, phdr, cacheln, item, size);
__smem            599 drivers/soc/qcom/smem.c 	} else if (__smem->global_partition) {
__smem            600 drivers/soc/qcom/smem.c 		phdr = __smem->global_partition;
__smem            601 drivers/soc/qcom/smem.c 		cacheln = __smem->global_cacheline;
__smem            602 drivers/soc/qcom/smem.c 		ptr = qcom_smem_get_private(__smem, phdr, cacheln, item, size);
__smem            604 drivers/soc/qcom/smem.c 		ptr = qcom_smem_get_global(__smem, item, size);
__smem            607 drivers/soc/qcom/smem.c 	hwspin_unlock_irqrestore(__smem->hwlock, &flags);
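Kernel lines 583-607 form the matching lookup path, qcom_smem_get(), which resolves an item to a pointer inside the right partition (using the per-host or global cacheline stride) or falls back to the legacy global lookup, again under the hardware spinlock. A hedged usage example of the alloc/get pair as an in-kernel client might call it; the item number and the client function are made up, QCOM_SMEM_HOST_ANY comes from <linux/soc/qcom/smem.h>:

#include <linux/device.h>
#include <linux/err.h>
#include <linux/soc/qcom/smem.h>

#define MY_SMEM_ITEM    499             /* hypothetical dynamic item number */

static int my_client_init(struct device *dev)
{
        size_t size;
        void *entry;
        int ret;

        /* Reserve the item once; -EEXIST just means it already exists. */
        ret = qcom_smem_alloc(QCOM_SMEM_HOST_ANY, MY_SMEM_ITEM, 64);
        if (ret < 0 && ret != -EEXIST)
                return ret;             /* -EPROBE_DEFER until smem has probed */

        /* Look the item up; failures come back as ERR_PTR() values. */
        entry = qcom_smem_get(QCOM_SMEM_HOST_ANY, MY_SMEM_ITEM, &size);
        if (IS_ERR(entry))
                return PTR_ERR(entry);

        dev_info(dev, "SMEM item %d: %zu bytes\n", MY_SMEM_ITEM, size);
        return 0;
}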
__smem            627 drivers/soc/qcom/smem.c 	if (!__smem)
__smem            630 drivers/soc/qcom/smem.c 	if (host < SMEM_HOST_COUNT && __smem->partitions[host]) {
__smem            631 drivers/soc/qcom/smem.c 		phdr = __smem->partitions[host];
__smem            634 drivers/soc/qcom/smem.c 	} else if (__smem->global_partition) {
__smem            635 drivers/soc/qcom/smem.c 		phdr = __smem->global_partition;
__smem            639 drivers/soc/qcom/smem.c 		header = __smem->regions[0].virt_base;
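Kernel lines 627-639 are the partition selection in qcom_smem_get_free_space(): inside a partition the free space is the distance between the cached free offset (growing down from the top) and the uncached free offset (growing up from the bottom); without any partition the legacy header's available field is reported. A sketch of that computation built around the lines above; the declarations and the le32 field names not shown in the listing are assumptions:

int qcom_smem_get_free_space(unsigned host)
{
        struct smem_partition_header *phdr;
        struct smem_header *header;
        unsigned ret;

        if (!__smem)
                return -EPROBE_DEFER;   /* line 627 */

        if (host < SMEM_HOST_COUNT && __smem->partitions[host]) {
                phdr = __smem->partitions[host];
                ret = le32_to_cpu(phdr->offset_free_cached) -
                      le32_to_cpu(phdr->offset_free_uncached);
        } else if (__smem->global_partition) {
                phdr = __smem->global_partition;
                ret = le32_to_cpu(phdr->offset_free_cached) -
                      le32_to_cpu(phdr->offset_free_uncached);
        } else {
                header = __smem->regions[0].virt_base;  /* line 639: legacy heap */
                ret = le32_to_cpu(header->available);
        }

        return ret;
}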
__smem            658 drivers/soc/qcom/smem.c 	for (i = 0; i < __smem->num_regions; i++) {
__smem            659 drivers/soc/qcom/smem.c 		struct smem_region *region = &__smem->regions[i];
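The pair at kernel lines 658-659 is the loop in qcom_smem_virt_to_phys(), which translates a pointer handed out by qcom_smem_get() back into a physical address by locating the memory region that contains it. A sketch of that walk; the virt_base, size and aux_base field names beyond what the listing shows are assumptions:

phys_addr_t qcom_smem_virt_to_phys(void *p)
{
        unsigned i;

        for (i = 0; i < __smem->num_regions; i++) {
                struct smem_region *region = &__smem->regions[i];

                /* skip regions that cannot contain the pointer */
                if (p < region->virt_base)
                        continue;
                if (p >= region->virt_base + region->size)
                        continue;

                /* physical base of the region plus the offset inside it */
                return region->aux_base + (p - region->virt_base);
        }

        return 0;
}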
__smem            965 drivers/soc/qcom/smem.c 	__smem = smem;
__smem            978 drivers/soc/qcom/smem.c 	platform_device_unregister(__smem->socinfo);
__smem            980 drivers/soc/qcom/smem.c 	hwspin_lock_free(__smem->hwlock);
__smem            981 drivers/soc/qcom/smem.c 	__smem = NULL;
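The last three references show the lifetime of the singleton: qcom_smem_probe() publishes the fully initialized context at line 965 (__smem = smem), and the remove path at lines 978-981 unregisters the socinfo child device, releases the hardware spinlock and clears the pointer so later callers see -EPROBE_DEFER again. A short sketch of that teardown; the function signature and return value are assumptions:

static int qcom_smem_remove(struct platform_device *pdev)
{
        platform_device_unregister(__smem->socinfo);    /* line 978 */

        hwspin_lock_free(__smem->hwlock);               /* line 980 */
        __smem = NULL;                                  /* line 981 */

        return 0;
}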