/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
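/*
 * Two clock-control backends: the radeon_legacy_* helpers program the clocks
 * via direct PLL register writes (older, non-AtomBIOS boards), while the
 * radeon_atom_* helpers go through AtomBIOS command tables.  Each ASIC table
 * below picks whichever backend its generation uses.
 */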

/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
extern int r100_init(struct radeon_device *rdev);
extern void r100_fini(struct radeon_device *rdev);
extern int r100_suspend(struct radeon_device *rdev);
extern int r100_resume(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
int r100_gpu_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_pages,
		   struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev);
void r100_hpd_init(struct radeon_device *rdev);
void r100_hpd_fini(struct radeon_device *rdev);
bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r100_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);

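/*
 * NULL entries in the tables below mark operations a family does not
 * implement here (r100-class parts, for example, have no DMA copy path and
 * no driver-controlled memory clock or PCIe lane setup).
 */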
static struct radeon_asic r100_asic = {
	.init = &r100_init,
	.fini = &r100_fini,
	.suspend = &r100_suspend,
	.resume = &r100_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r100_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r100_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r100_fence_ring_emit,
	.cs_parse = &r100_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = NULL,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};
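/*
 * Illustrative sketch only (not part of this header): during early driver
 * init one of the tables in this file is bound to rdev->asic, and the rest
 * of the driver then calls hardware code through it.  The real selection
 * lives in radeon_asic_init() in radeon_device.c and the dispatch wrappers
 * in radeon.h; the helper name below is made up for illustration.
 */
#if 0
static int radeon_bind_asic_example(struct radeon_device *rdev)
{
	switch (rdev->family) {
	case CHIP_R100:
		rdev->asic = &r100_asic;
		break;
	case CHIP_R600:
		rdev->asic = &r600_asic;
		break;
	default:
		return -EINVAL;	/* family not handled in this sketch */
	}
	/* generic code then dispatches indirectly, e.g.: */
	return rdev->asic->init(rdev);
}
#endif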


/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r300_copy_dma(struct radeon_device *rdev,
			 uint64_t src_offset,
			 uint64_t dst_offset,
			 unsigned num_pages,
			 struct radeon_fence *fence);
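/*
 * r300-class parts add a CP DMA copy path (r300_copy_dma) and PCIe register
 * access / lane configuration (rv370_*), but the table below still pairs
 * them with the r100 PCI GART.
 */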
static struct radeon_asic r300_asic = {
	.init = &r300_init,
	.fini = &r300_fini,
	.suspend = &r300_suspend,
	.resume = &r300_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
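/*
 * The r420 family reuses the r300 ring/CS/fence code but switches to the
 * rv370 PCIe GART and AtomBIOS-driven clock control.
 */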
static struct radeon_asic r420_asic = {
	.init = &r420_init,
	.fini = &r420_fini,
	.suspend = &r420_suspend,
	.resume = &r420_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};


/*
 * rs400,rs480
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
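/*
 * rs400/rs480 are IGP chipsets: they bring their own GART and MC register
 * access (rs400_*) while clocks stay on the legacy path.
 */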
static struct radeon_asic rs400_asic = {
	.init = &rs400_init,
	.fini = &rs400_fini,
	.suspend = &rs400_suspend,
	.resume = &rs400_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};


/*
 * rs600.
 */
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
void rs600_hpd_init(struct radeon_device *rdev);
void rs600_hpd_fini(struct radeon_device *rdev);
bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void rs600_hpd_set_polarity(struct radeon_device *rdev,
			    enum radeon_hpd_id hpd);

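/*
 * rs600 introduces its own IRQ/vblank, GART, MC access and HPD code; several
 * of the later tables (rs690, rv515, r520, r600, rv770) reuse these hooks.
 */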
static struct radeon_asic rs600_asic = {
	.init = &rs600_init,
	.fini = &rs600_fini,
	.suspend = &rs600_suspend,
	.resume = &rs600_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs600_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.bandwidth_update = &rs600_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};


/*
 * rs690,rs740
 */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
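/*
 * rs690/rs740 pair the rs400-style GART with the rs600 IRQ/HPD code and are
 * the only table here whose default .copy hook uses the DMA engine rather
 * than the blitter.
 */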
static struct radeon_asic rs690_asic = {
	.init = &rs690_init,
	.fini = &rs690_fini,
	.suspend = &rs690_suspend,
	.resume = &rs690_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r300_copy_dma,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rs690_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};


/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
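/*
 * rv515 supplies its own MC/PCIe register access, ring start, GPU reset and
 * display bandwidth code; interrupts and HPD come from the rs600 path.
 */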
static struct radeon_asic rv515_asic = {
	.init = &rv515_init,
	.fini = &rv515_fini,
	.suspend = &rv515_suspend,
	.resume = &rv515_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};


/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
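/*
 * r520-class parts only need their own init/resume; everything else in the
 * table below is shared with rv515.
 */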
static struct radeon_asic r520_asic = {
	.init = &r520_init,
	.fini = &rv515_fini,
	.suspend = &rv515_suspend,
	.resume = &r520_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
};

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
extern void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo);

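/*
 * R6xx routes both copy hooks through the 3D-engine blit (r600_copy_blit);
 * the GART page encoding and vblank counter are shared with rs600, and this
 * is the first table to provide an ioctl_wait_idle hook.
 */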
static struct radeon_asic r600_asic = {
	.init = &r600_init,
	.fini = &r600_fini,
	.suspend = &r600_suspend,
	.resume = &r600_resume,
	.cp_commit = &r600_cp_commit,
	.vga_set_state = &r600_vga_set_state,
	.gpu_reset = &r600_gpu_reset,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
	.ioctl_wait_idle = r600_ioctl_wait_idle,
};

/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

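/*
 * rv770 parts reuse the r600 path wholesale apart from their own
 * init/fini/suspend/resume and GPU reset.
 */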
static struct radeon_asic rv770_asic = {
	.init = &rv770_init,
	.fini = &rv770_fini,
	.suspend = &rv770_suspend,
	.resume = &rv770_resume,
	.cp_commit = &r600_cp_commit,
	.gpu_reset = &rv770_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
	.ioctl_wait_idle = r600_ioctl_wait_idle,
};

#endif