// Copyright 2017 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#pragma once

#if __cplusplus

#include <ddk/protocol/i2c-impl.h>
#include <ddk/protocol/intel-gpu-core.h>
#include <ddk/protocol/pci.h>
#include <ddktl/protocol/display-controller.h>

#include <fbl/unique_ptr.h>
#include <fbl/vector.h>
#include <hwreg/mmio.h>
#include <threads.h>

#include "display-device.h"
#include "dp-display.h"
#include "gtt.h"
#include "hdmi-display.h"
#include "igd.h"
#include "interrupts.h"
#include "pipe.h"
#include "power.h"
#include "registers.h"
#include "registers-ddi.h"
#include "registers-dpll.h"
#include "registers-pipe.h"
#include "registers-transcoder.h"

namespace i915 {

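// A contiguous range of display buffer blocks assigned to a plane or pipe;
// see plane_buffers_ and pipe_buffers_ below.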
typedef struct buffer_allocation {
    uint16_t start;
    uint16_t end;
} buffer_allocation_t;

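// The configuration of a DPLL: either the DP link rate it drives or the
// DCO/divider settings used for an HDMI mode.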
typedef struct dpll_state {
    bool is_hdmi;
    union {
        uint32_t dp_rate;
        struct {
            uint16_t dco_int;
            uint16_t dco_frac;
            uint8_t q;
            uint8_t q_mode;
            uint8_t k;
            uint8_t p;
            uint8_t cf;
        } hdmi;
    };
} dpll_state_t;

class Controller;
using DeviceType = ddk::Device<Controller, ddk::Unbindable,
                               ddk::Suspendable, ddk::Resumable, ddk::GetProtocolable>;

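// The core driver class. In addition to the display controller protocol ops,
// it implements the intel-gpu-core and i2c-impl protocol ops declared below.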
class Controller : public DeviceType, public ddk::DisplayControllerProtocol<Controller> {
public:
    Controller(zx_device_t* parent);
    ~Controller();

    static bool CompareDpllStates(const dpll_state_t& a, const dpll_state_t& b);

    // DDK ops
    void DdkUnbind();
    void DdkRelease();
    zx_status_t DdkGetProtocol(uint32_t proto_id, void* out);
    zx_status_t DdkSuspend(uint32_t reason);
    zx_status_t DdkResume(uint32_t reason);
    zx_status_t Bind(fbl::unique_ptr<i915::Controller>* controller_ptr);

    // display controller protocol ops
    void SetDisplayControllerCb(void* cb_ctx, display_controller_cb_t* cb);
    zx_status_t ImportVmoImage(image_t* image, const zx::vmo& vmo, size_t offset);
    void ReleaseImage(image_t* image);
    void CheckConfiguration(const display_config_t** display_config,
                            uint32_t* display_cfg_result, uint32_t** layer_cfg_result,
                            uint32_t display_count);
    void ApplyConfiguration(const display_config_t** display_config, uint32_t display_count);
    uint32_t ComputeLinearStride(uint32_t width, zx_pixel_format_t format);
    zx_status_t AllocateVmo(uint64_t size, zx_handle_t* vmo_out);

    // gpu core ops
    zx_status_t ReadPciConfig16(uint16_t addr, uint16_t* value_out);
    zx_status_t MapPciMmio(uint32_t pci_bar, void** addr_out, uint64_t* size_out);
    zx_status_t UnmapPciMmio(uint32_t pci_bar);
    zx_status_t GetPciBti(uint32_t index, zx_handle_t* bti_out);
    zx_status_t RegisterInterruptCallback(zx_intel_gpu_core_interrupt_callback_t callback,
                                          void* data, uint32_t interrupt_mask);
    zx_status_t UnregisterInterruptCallback();
    uint64_t GttGetSize();
    zx_status_t GttAlloc(uint64_t page_count, uint64_t* addr_out);
    zx_status_t GttFree(uint64_t addr);
    zx_status_t GttClear(uint64_t addr);
    zx_status_t GttInsert(uint64_t addr, zx_handle_t buffer,
                          uint64_t page_offset, uint64_t page_count);
    void GpuRelease();

    // i2c ops
    uint32_t GetBusCount();
    zx_status_t GetMaxTransferSize(uint32_t bus_id, size_t* out_size);
    zx_status_t SetBitrate(uint32_t bus_id, uint32_t bitrate);
    zx_status_t Transact(uint32_t bus_id, i2c_impl_op_t* ops, size_t count);

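    // DPCD register access over the DisplayPort aux channel of the given DDI.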
    bool DpcdRead(registers::Ddi ddi, uint32_t addr, uint8_t* buf, size_t size);
    bool DpcdWrite(registers::Ddi ddi, uint32_t addr, const uint8_t* buf, size_t size);

    pci_protocol_t* pci() { return &pci_; }
    hwreg::RegisterIo* mmio_space() { return mmio_space_.get(); }
    Gtt* gtt() { return &gtt_; }
    Interrupts* interrupts() { return &interrupts_; }
    uint16_t device_id() const { return device_id_; }
    const IgdOpRegion& igd_opregion() const { return igd_opregion_; }
    Power* power() { return &power_; }

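    // Interrupt handlers for DDI hotplug and pipe vsync events.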
    void HandleHotplug(registers::Ddi ddi, bool long_pulse);
    void HandlePipeVsync(registers::Pipe pipe, zx_time_t timestamp);

    void FinishInit();
    void ResetPipe(registers::Pipe pipe) __TA_NO_THREAD_SAFETY_ANALYSIS;
    bool ResetTrans(registers::Trans trans);
    bool ResetDdi(registers::Ddi ddi);

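    // Returns the GTT region backing an imported image handle.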
    const fbl::unique_ptr<GttRegion>& GetGttRegion(void* handle);

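    // DPLL management: selects a DPLL whose configuration matches |state|
    // (sharing an already-configured DPLL where possible) and queries the
    // current state of a DPLL.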
    registers::Dpll SelectDpll(bool is_edp, const dpll_state_t& state);
    const dpll_state_t* GetDpllState(registers::Dpll dpll);
private:
    void EnableBacklight(bool enable);
    void InitDisplays();
    fbl::unique_ptr<DisplayDevice> QueryDisplay(registers::Ddi ddi) __TA_REQUIRES(display_lock_);
    bool LoadHardwareState(registers::Ddi ddi, DisplayDevice* device) __TA_REQUIRES(display_lock_);
    zx_status_t AddDisplay(fbl::unique_ptr<DisplayDevice>&& display) __TA_REQUIRES(display_lock_);
    bool BringUpDisplayEngine(bool resume) __TA_REQUIRES(display_lock_);
    void InitDisplayBuffers();
    DisplayDevice* FindDevice(uint64_t display_id) __TA_REQUIRES(display_lock_);

    void CallOnDisplaysChanged(DisplayDevice** added, uint32_t added_count, uint64_t* removed,
                               uint32_t removed_count) __TA_REQUIRES(display_lock_);

    // Gets the layer_t* config for the given pipe/plane. Returns false if there is no layer.
    bool GetPlaneLayer(registers::Pipe pipe, uint32_t plane,
                       const display_config_t** configs, uint32_t display_count,
                       const layer_t** layer_out) __TA_REQUIRES(display_lock_);
    uint16_t CalculateBuffersPerPipe(uint32_t display_count);
    // Returns false if no allocation is possible. When that happens,
    // plane 0 of the failing displays will be set to UINT16_MAX.
    bool CalculateMinimumAllocations(const display_config_t** display_configs,
                                     uint32_t display_count,
                                     uint16_t min_allocs[registers::kPipeCount]
                                                        [registers::kImagePlaneCount])
                                     __TA_REQUIRES(display_lock_);
    // Updates plane_buffers_ based on pipe_buffers_ and the given parameters.
    void UpdateAllocations(const uint16_t min_allocs[registers::kPipeCount]
                                                    [registers::kImagePlaneCount],
                           const uint64_t display_rate[registers::kPipeCount]
                                                      [registers::kImagePlaneCount])
                           __TA_REQUIRES(display_lock_);
    // Reallocates the pipe buffers when a pipe comes online/goes offline. This is a
    // long-running operation, as shifting allocations between pipes requires waiting
    // for vsync.
    void DoPipeBufferReallocation(buffer_allocation_t active_allocation[registers::kPipeCount])
                                  __TA_REQUIRES(display_lock_);
    // Reallocates plane buffers based on the given layer config.
    void ReallocatePlaneBuffers(const display_config_t** display_configs,
                                uint32_t display_count,
                                bool reallocate_pipes) __TA_REQUIRES(display_lock_);

    // Validates that a basic layer configuration can be supported for the
    // given modes of the displays.
    bool CheckDisplayLimits(const display_config_t** display_configs, uint32_t display_count,
                            uint32_t** layer_cfg_results) __TA_REQUIRES(display_lock_);

    bool CalculatePipeAllocation(const display_config_t** display_config, uint32_t display_count,
                                 uint64_t alloc[registers::kPipeCount])
                                 __TA_REQUIRES(display_lock_);
    bool ReallocatePipes(const display_config_t** display_config, uint32_t display_count)
                         __TA_REQUIRES(display_lock_);

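    // The device node published for the gpu core protocol, and flags tracking
    // whether the gpu and display devices have been released.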
    zx_device_t* zx_gpu_dev_ = nullptr;
    bool gpu_released_ = false;
    bool display_released_ = false;

    void* dc_cb_ctx_ __TA_GUARDED(display_lock_);
    display_controller_cb_t* dc_cb_ __TA_GUARDED(display_lock_) = nullptr;
    bool ready_for_callback_ __TA_GUARDED(display_lock_) = false;

    Gtt gtt_ __TA_GUARDED(gtt_lock_);
    mtx_t gtt_lock_;
    // These regions' VMOs are not owned
    fbl::Vector<fbl::unique_ptr<GttRegion>> imported_images_ __TA_GUARDED(gtt_lock_);
    // These regions' VMOs are owned
    fbl::Vector<fbl::unique_ptr<GttRegion>> imported_gtt_regions_ __TA_GUARDED(gtt_lock_);

    IgdOpRegion igd_opregion_; // Read only, no locking
    Interrupts interrupts_; // Internal locking

    pci_protocol_t pci_;
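    // Mapped PCI BARs, reference counted so repeated MapPciMmio calls can
    // share a mapping.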
    struct {
        void* base;
        uint64_t size;
        zx_handle_t vmo;
        int32_t count = 0;
    } mapped_bars_[PCI_MAX_BAR_COUNT] __TA_GUARDED(bar_lock_);
    mtx_t bar_lock_;
    // The mmio_space_ unique_ptr is read only. The internal registers are
    // guarded by various locks where appropriate.
    fbl::unique_ptr<hwreg::RegisterIo> mmio_space_;

    // References to displays. References are owned by devmgr, but will always
    // be valid while they are in this vector.
    fbl::Vector<fbl::unique_ptr<DisplayDevice>> display_devices_ __TA_GUARDED(display_lock_);
    uint64_t next_id_ __TA_GUARDED(display_lock_) = 1; // id can't be INVALID_DISPLAY_ID == 0
    mtx_t display_lock_;

    Pipe pipes_[registers::kPipeCount] __TA_GUARDED(display_lock_) = {
        Pipe(this, registers::PIPE_A), Pipe(this, registers::PIPE_B), Pipe(this, registers::PIPE_C)
    };

    Power power_;
    PowerWellRef cd_clk_power_well_;
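    // Per-DPLL usage tracking, used to share a DPLL between displays whose
    // states compare equal (see CompareDpllStates).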
    struct {
        uint8_t use_count = 0;
        dpll_state_t state;
    } dplls_[registers::kDpllCount] = {};

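    // GMBus i2c connections, one per DDI.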
    GMBusI2c gmbus_i2cs_[registers::kDdiCount] = {
        GMBusI2c(registers::DDI_A), GMBusI2c(registers::DDI_B), GMBusI2c(registers::DDI_C),
        GMBusI2c(registers::DDI_D), GMBusI2c(registers::DDI_E),
    };

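    // DisplayPort aux channels, one per DDI, used for DPCD access
    // (see DpcdRead/DpcdWrite).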
    DpAux dp_auxs_[registers::kDdiCount] = {
        DpAux(registers::DDI_A), DpAux(registers::DDI_B), DpAux(registers::DDI_C),
        DpAux(registers::DDI_D), DpAux(registers::DDI_E),
    };

    // Plane buffer allocation. If no alloc, start == end == registers::PlaneBufCfg::kBufferCount.
    buffer_allocation_t plane_buffers_[registers::kPipeCount][registers::kImagePlaneCount]
            __TA_GUARDED(display_lock_) = {};
    // Buffer allocations for pipes
    buffer_allocation_t pipe_buffers_[registers::kPipeCount] __TA_GUARDED(display_lock_) = {};
    bool initial_alloc_ = true;

    uint16_t device_id_;
    uint32_t flags_;

    // Various configuration values set by the BIOS which need to be carried across suspend.
    uint32_t pp_divisor_val_;
    uint32_t pp_off_delay_val_;
    uint32_t pp_on_delay_val_;
    uint32_t sblc_ctrl2_val_;
    uint32_t schicken1_val_;
    bool ddi_a_lane_capability_control_;
    bool sblc_polarity_;

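    // Thread used to complete initialization after binding (see FinishInit).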
    bool init_thrd_started_ = false;
    thrd_t init_thread_;
};

} // namespace i915

#endif // __cplusplus

__BEGIN_CDECLS
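// C entry point for binding the driver to |parent|.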
zx_status_t intel_i915_bind(void* ctx, zx_device_t* parent);
__END_CDECLS