/*
 * Renderer 7. The MIT License.
 * Copyright (c) 2022 rlkraft@pnw.edu
 * See LICENSE for details.
*/

package renderer.pipeline;

import renderer.scene.*;
import renderer.scene.util.CheckModel;
import renderer.framebuffer.*;
import static renderer.pipeline.PipelineLogger.*;

import java.awt.Color;

/**
   This renderer takes as its input a {@link Scene} data structure
   and a {@link FrameBuffer.Viewport} within a {@link FrameBuffer}
   data structure. This renderer mutates the {@link FrameBuffer.Viewport}
   so that it is filled in with the rendered image of the geometric
   scene represented by the {@link Scene} object.
<p>
   This implements our sixth rendering pipeline. It adds a view
   volume normalization stage, {@link View2Camera}, that converts
   a {@link Camera}'s configurable view volume into the normalized
   view volume used by the {@link Clip} pipeline stage. Coordinates
   relative to the {@link Camera}'s arbitrary view volume are called
   "view coordinates" and coordinates relative to the normalized view
   volume are called "camera coordinates", so the new pipeline stage
   converts vertex coordinates from view coordinates to camera
   coordinates. There are six pipeline stages.
*/
public final class Pipeline
{
   // Mostly for compatibility with renderers 1 through 3.
   public static Color DEFAULT_COLOR = Color.white;

   /**
      Mutate the {@link FrameBuffer}'s default {@link FrameBuffer.Viewport}
      so that it holds the rendered image of the {@link Scene} object.

      @param scene  {@link Scene} object to render
      @param fb     {@link FrameBuffer} to hold rendered image of the {@link Scene}
   */
   public static void render(final Scene scene, final FrameBuffer fb)
   {
      render(scene, fb.vp); // render into the default viewport
   }


   /**
      Mutate the {@link FrameBuffer}'s given {@link FrameBuffer.Viewport}
      so that it holds the rendered image of the {@link Scene} object.

      @param scene  {@link Scene} object to render
      @param vp     {@link FrameBuffer.Viewport} to hold rendered image of the {@link Scene}
   */
   public static void render(final Scene scene, final FrameBuffer.Viewport vp)
   {
      PipelineLogger.debugScene = scene.debug;

      logMessage("\n== Begin Rendering of Scene: " + scene.name + " ==");

      logMessage("-- Current Camera:\n" + scene.camera);

      // For every Position in the Scene, render the Position's Model.
      for (final Position position : scene.positionList)
      {
         PipelineLogger.debugPosition = position.debug;

         if ( position.visible )
         {
            logMessage("==== Render position: " + position.name + " ====");

            logMessage("---- Translation vector = " + position.getTranslation());

            if ( position.getModel().visible )
            {
               logMessage("====== Render model: "
                             + position.getModel().name + " ======");

               CheckModel.check(position.getModel());

               // Mostly for compatibility with renderers 1 through 3.
               if ( position.getModel().colorList.isEmpty()
                 && !position.getModel().vertexList.isEmpty())
               {
                  for (int i = 0; i < position.getModel().vertexList.size(); ++i)
                  {
                     position.getModel().addColor( DEFAULT_COLOR );
                  }
                  System.err.println("***WARNING: Added default color to model: "
                                    + position.getModel().name + ".");
               }

               logVertexList("0. Model ", position.getModel());

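               // The six stages below form a chain: each stage takes the Model
               // produced by the previous stage and returns a new Model, and only
               // the final stage writes into the Viewport. Per the stage comments
               // and this file's javadoc, vertex data flows from model coordinates
               // to view coordinates (Model2View), then to normalized camera
               // coordinates (View2Camera), is clipped against the near plane
               // (NearClip), projected (Projection), clipped to the view
               // rectangle (Clip), and rasterized into pixels (Rasterize).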
               // 1. Apply the Position's model-to-view coordinate transformation.
               final Model model1 = Model2View.model2view(position);

               logVertexList("1. View ", model1);

               // 2. Apply the Camera's normalizing view-to-camera coordinate transformation.
               final Model model2 = View2Camera.view2camera(model1,
                                                            scene.camera);

               logVertexList("2. Camera ", model2);
               logColorList("2. Camera ", model2);
               logPrimitiveList("2. Camera ", model2);

               // 3. Clip primitives to the camera's near plane.
               final Model model3 = NearClip.clip(model2,
                                                  scene.camera);

               logVertexList("3. Near_Clipped", model3);
               logColorList("3. Near_Clipped", model3);
               logPrimitiveList("3. Near_Clipped", model3);

               // 4. Apply the Camera's projection transformation.
               final Model model4 = Projection.project(model3,
                                                       scene.camera);

               logVertexList("4. Projected ", model4);

               // 5. Clip primitives to the camera's view rectangle.
               final Model model5 = Clip.clip(model4);

               logVertexList("5. Clipped ", model5);
               logColorList("5. Clipped ", model5);
               logPrimitiveList("5. Clipped ", model5);

               // 6. Rasterize every visible primitive into pixels.
               Rasterize.rasterize(model5, vp);

               logMessage("====== End model: "
                             + position.getModel().name + " ======");
            }
            else
            {
               logMessage("====== Hidden model: "
                             + position.getModel().name + " ======");
            }

            logMessage("==== End position: " + position.name + " ====");
         }
         else
         {
            logMessage("==== Hidden position: " + position.name + " ====");
         }
      }
      logMessage("== End Rendering of Scene ==");
   }



   // Private default constructor to enforce noninstantiable class.
   // See Item 4 in "Effective Java", 3rd Ed, Joshua Bloch.
   private Pipeline()
   {
      throw new AssertionError();
   }
}
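/*
 * Minimal usage sketch (not part of the class above): the two Pipeline.render
 * overloads and the fb.vp field are taken from this file, but the Scene,
 * Position, and FrameBuffer construction shown below is only assumed from the
 * class names and may not match the actual constructors in renderer.scene and
 * renderer.framebuffer.
 *
 *    final Scene scene = new Scene();                   // assumed constructor
 *    scene.positionList.add(new Position(someModel));   // assumed Position API
 *    final FrameBuffer fb = new FrameBuffer(800, 600);  // assumed constructor
 *    Pipeline.render(scene, fb);     // render into the FrameBuffer's default viewport
 *    Pipeline.render(scene, fb.vp);  // or render into a specific Viewport
 */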