How to use custom shader with GLKit

Does anyone know how to use a custom shader with GLKit? I don't want to use the GLKBaseEffect class.

Best wishes,

+4
source share
2 answers

As far as I know, you configure and use shader programs the same way as in non-GLKit OpenGL ES 2.0. Currently, I prefer to use Jeff LaMarche's GLProgram wrapper class, which he provides as part of his tutorial (the code is linked at the top of that page), rather than rewriting all the boilerplate code to compile and link the shaders every time.

I show some standard usage of this in this answer. You still need to set up your attributes and uniforms, but once you do, you just use the program and draw your geometry in your GLKView.

The basic GLKit effects are apparently implemented as wrappers around some standard shader programs in a similar way.

+3
source

Not my code, but I found it very useful. It even displays errors for shaders.

// Compiles a single shader of the given type from "<shaderName>.glsl" in the
// main bundle and returns its GL handle.
// - Parameter shaderName: base name of the .glsl resource (without extension).
// - Parameter shaderType: GL_VERTEX_SHADER or GL_FRAGMENT_SHADER.
// - Returns: the compiled shader object's handle.
// On any failure (missing file, unreadable file, compile error) this prints a
// diagnostic — including the driver's info log for compile errors — and exits.
func compileShader(shaderName: String?, shaderType: GLenum) -> GLuint {
    // Locate the shader source file; a missing resource is a packaging error,
    // so report it instead of crashing on a force-unwrap.
    guard let name = shaderName,
          let shaderPath = NSBundle.mainBundle().pathForResource(name, ofType: "glsl") else {
        print("Failed to locate shader file \(shaderName ?? "<nil>").glsl!")
        exit(1)
    }

    // Read the GLSL source. The original code force-unwrapped the result and
    // then dead-checked it for nil; here the failure is reported properly.
    let shaderString: NSString
    do {
        let contents = try NSString(contentsOfFile: shaderPath, encoding: NSUTF8StringEncoding)
        // Ensure the source ends with a newline — some GLSL compilers reject
        // sources whose last line is unterminated.
        shaderString = (contents as String + "\n") as NSString
    } catch let readError as NSError {
        print("Failed to read shader file \(name).glsl: \(readError)")
        exit(1)
    }

    let shaderHandle: GLuint = glCreateShader(shaderType)

    // glShaderSource expects a pointer to an array of C strings plus an array
    // of their lengths; pass a single source string.
    var shaderStringUTF8 = shaderString.UTF8String
    var shaderStringLength: GLint = GLint(shaderString.length)
    glShaderSource(shaderHandle, 1, &shaderStringUTF8, &shaderStringLength)
    glCompileShader(shaderHandle)

    // Check the compile status and surface the driver's info log on failure.
    var compileSuccess: GLint = GLint()
    glGetShaderiv(shaderHandle, GLenum(GL_COMPILE_STATUS), &compileSuccess)
    if compileSuccess == GL_FALSE {
        print("Failed to compile shader \(name)!")
        var logLength: GLint = 0
        glGetShaderiv(shaderHandle, GLenum(GL_INFO_LOG_LENGTH), &logLength)
        var infoLog: [GLchar] = [GLchar](count: Int(logLength), repeatedValue: 0)
        var infoLogLength: GLsizei = 0
        glGetShaderInfoLog(shaderHandle, logLength, &infoLogLength, &infoLog)
        let message = NSString(bytes: infoLog, length: Int(infoLogLength), encoding: NSASCIIStringEncoding)
        print(message)
        exit(1)
    }
    return shaderHandle
} // function compiles vertex and fragment shaders into program.
// Returns program handle.
// Compiles the vertex and fragment shaders, links them into a program,
// activates it with glUseProgram, and caches the attribute slots the draw
// code feeds. Prints the link log and exits on link failure.
func compileShaders() -> GLuint {
    let vertexShader: GLuint = self.compileShader("SimpleVertex", shaderType: GLenum(GL_VERTEX_SHADER))
    let fragmentShader: GLuint = self.compileShader("SimpleFragment", shaderType: GLenum(GL_FRAGMENT_SHADER))

    let programHandle: GLuint = glCreateProgram()
    glAttachShader(programHandle, vertexShader)
    glAttachShader(programHandle, fragmentShader)
    glLinkProgram(programHandle)

    // Once linked, the individual shader objects are no longer needed;
    // detaching and deleting them avoids leaking GL objects on every call.
    glDetachShader(programHandle, vertexShader)
    glDetachShader(programHandle, fragmentShader)
    glDeleteShader(vertexShader)
    glDeleteShader(fragmentShader)

    // Check the link status and surface the driver's info log on failure.
    var linkSuccess: GLint = GLint()
    glGetProgramiv(programHandle, GLenum(GL_LINK_STATUS), &linkSuccess)
    if linkSuccess == GL_FALSE {
        print("Failed to create shader program!")
        var logLength: GLint = 0
        glGetProgramiv(programHandle, GLenum(GL_INFO_LOG_LENGTH), &logLength)
        var infoLog: [GLchar] = [GLchar](count: Int(logLength), repeatedValue: 0)
        var infoLogLength: GLsizei = 0
        glGetProgramInfoLog(programHandle, logLength, &infoLogLength, &infoLog)
        let message = NSString(bytes: infoLog, length: Int(infoLogLength), encoding: NSASCIIStringEncoding)
        print(message)
        exit(1)
    }

    glUseProgram(programHandle)

    // Look up and enable the vertex attributes used by the shaders.
    // NOTE(review): attribute names must match the GLSL sources — verify.
    self.positionSlot = GLuint(glGetAttribLocation(programHandle, "aVertexPosition"))
    //self.colorSlot = GLuint(glGetAttribLocation(programHandle, "SourceColor"))
    self.normalSlot = GLuint(glGetAttribLocation(programHandle, "aVertexNormal"))
    glEnableVertexAttribArray(self.positionSlot)
    // glEnableVertexAttribArray(self.colorSlot)
    glEnableVertexAttribArray(self.normalSlot)
    return programHandle
}
+1
source

Source: https://habr.com/ru/post/1411435/


All Articles