1 
   2 
   3 
   4 
   5 
   6 
   7 
   8 
   9 
  10 
  11 
  12 
  13 
  14 
  15 
  16 
  17 
  18 
  19 
  20 
  21 
  22 
  23 
  24 
  25 
  26 
  27 
  28 
  29 
  30 
  31 
  32 
  33 
  34 
  35 
#ifndef _GVT_MMIO_H_
#define _GVT_MMIO_H_

struct intel_gvt;
struct intel_vgpu;

/*
 * Device-type flags: one bit per supported GPU platform generation
 * (Broadwell, Skylake, Kaby Lake, Broxton, Coffee Lake). Stored in
 * intel_gvt_mmio_info.device to restrict an MMIO handler to specific
 * hardware, and matched at runtime via intel_gvt_match_device().
 */
#define D_BDW   (1 << 0)
#define D_SKL   (1 << 1)
#define D_KBL   (1 << 2)
#define D_BXT   (1 << 3)
#define D_CFL   (1 << 4)

/* Convenience masks covering "this generation and everything newer". */
#define D_GEN9PLUS      (D_SKL | D_KBL | D_BXT | D_CFL)
#define D_GEN8PLUS      (D_BDW | D_SKL | D_KBL | D_BXT | D_CFL)

/* Platform-named aliases for the generation masks above. */
#define D_SKL_PLUS      (D_SKL | D_KBL | D_BXT | D_CFL)
#define D_BDW_PLUS      (D_BDW | D_SKL | D_KBL | D_BXT | D_CFL)

#define D_PRE_SKL       (D_BDW)
#define D_ALL           (D_BDW | D_SKL | D_KBL | D_BXT | D_CFL)

/*
 * MMIO access handler: (vgpu, register offset, data buffer, access width
 * in bytes). NOTE(review): presumably returns 0 on success and a negative
 * error code on failure, per kernel convention — confirm against handlers.c.
 */
typedef int (*gvt_mmio_func)(struct intel_vgpu *, unsigned int, void *,
                             unsigned int);
  59 
/*
 * Per-register tracking entry describing how a virtualized MMIO register
 * is emulated. Entries are registered by intel_gvt_setup_mmio_info() and
 * looked up by offset through the hash node below.
 */
struct intel_gvt_mmio_info {
        u32 offset;             /* register offset within the MMIO space */
        u64 ro_mask;            /* bits that are read-only to the guest */
        u32 device;             /* D_* flags: platforms this entry applies to */
        gvt_mmio_func read;     /* emulated read handler (NULL for default?) */
        gvt_mmio_func write;    /* emulated write handler */
        u32 addr_range;         /* size in bytes of the covered register range */
        struct hlist_node node; /* linkage into the offset-keyed hash table */
};
  69 
/* Map a render-engine MMIO register offset to its owning ring/engine id. */
int intel_gvt_render_mmio_to_ring_id(struct intel_gvt *gvt,
                unsigned int reg);
/* Return the D_* device-type flag(s) describing the running hardware. */
unsigned long intel_gvt_get_device_type(struct intel_gvt *gvt);
/* True if the current hardware matches the given D_* device mask. */
bool intel_gvt_match_device(struct intel_gvt *gvt, unsigned long device);

/* Build / tear down the global MMIO tracking table (intel_gvt_mmio_info). */
int intel_gvt_setup_mmio_info(struct intel_gvt *gvt);
void intel_gvt_clean_mmio_info(struct intel_gvt *gvt);
/* Invoke @handler for every tracked MMIO offset; stops on non-zero return.
 * NOTE(review): early-stop semantics inferred from the int return — confirm. */
int intel_gvt_for_each_tracked_mmio(struct intel_gvt *gvt,
        int (*handler)(struct intel_gvt *gvt, u32 offset, void *data),
        void *data);

/* Per-vGPU virtual MMIO state lifecycle.
 * NOTE(review): @dmlr presumably selects device-model-level (full) reset
 * vs. a lighter per-engine reset — confirm against mmio.c. */
int intel_vgpu_init_mmio(struct intel_vgpu *vgpu);
void intel_vgpu_reset_mmio(struct intel_vgpu *vgpu, bool dmlr);
void intel_vgpu_clean_mmio(struct intel_vgpu *vgpu);

/* Translate a guest physical address into an MMIO register offset. */
int intel_vgpu_gpa_to_mmio_offset(struct intel_vgpu *vgpu, u64 gpa);

/* Trap-and-emulate entry points for guest MMIO accesses at guest
 * physical address @pa, @bytes wide. */
int intel_vgpu_emulate_mmio_read(struct intel_vgpu *vgpu, u64 pa,
                                void *p_data, unsigned int bytes);
int intel_vgpu_emulate_mmio_write(struct intel_vgpu *vgpu, u64 pa,
                                void *p_data, unsigned int bytes);

/* Fallback handlers used when a register has no specialized read/write. */
int intel_vgpu_default_mmio_read(struct intel_vgpu *vgpu, unsigned int offset,
                                 void *p_data, unsigned int bytes);
int intel_vgpu_default_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
                                  void *p_data, unsigned int bytes);

/* True if @offset is whitelisted for guest force-nonpriv register writes. */
bool intel_gvt_in_force_nonpriv_whitelist(struct intel_gvt *gvt,
                                          unsigned int offset);

/* Unified read/write dispatcher; @is_read selects direction. */
int intel_vgpu_mmio_reg_rw(struct intel_vgpu *vgpu, unsigned int offset,
                           void *pdata, unsigned int bytes, bool is_read);

/* Write handler for registers using the masked-bit update convention
 * (upper 16 bits select which lower bits take effect).
 * NOTE(review): masked-register semantics inferred from the name — confirm. */
int intel_vgpu_mask_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
                                  void *p_data, unsigned int bytes);
#endif