Reputation: 127
I am developing an Android application with OpenGL ES 2.0. In this application I draw multiple lines and circles in response to touch events on a GLSurfaceView.
Since OpenGL depends on the GPU, it currently works fine on a Google Nexus 7 (ULP GeForce).
On a Samsung Galaxy Note 2 (Mali-400 MP), when I try to draw more than one line, it clears the previous line and draws the current one as a new line.
On a Sony Xperia Neo V (Adreno 205), when I try to draw a new line, the surface becomes corrupted as shown in the image below.
Is it possible to make this work on all devices, or do I need to write code for each individual GPU?
// In the onCreate method of my activity, I set up the GLSurfaceView and the renderer.
final ActivityManager activityManager =
( ActivityManager ) getSystemService( Context.ACTIVITY_SERVICE );
final ConfigurationInfo configurationInfo =
activityManager.getDeviceConfigurationInfo( );
final boolean supportsEs2 = ( configurationInfo.reqGlEsVersion >= 0x20000
|| Build.FINGERPRINT.startsWith( "generic" ) );
if( supportsEs2 ) {
Log.i( "JO", "configurationInfo.reqGlEsVersion:"
+ configurationInfo.reqGlEsVersion + "supportsEs2:"
+ supportsEs2 );
// Request an OpenGL ES 2.0 compatible context.
myGlsurfaceView.setEGLContextClientVersion( 2 );
final DisplayMetrics displayMetrics = new DisplayMetrics( );
getWindowManager( ).getDefaultDisplay( ).getMetrics( displayMetrics );
// Set the renderer to our demo renderer, defined below.
myRenderer = new MyRenderer( this, myGlsurfaceView );
myGlsurfaceView.setRenderer( myRenderer, displayMetrics.density );
myGlsurfaceView.setRenderMode( GLSurfaceView.RENDERMODE_CONTINUOUSLY );
}
MyGLSurfaceView.java
// In this class I get the coordinates of touches on the GLSurfaceView to draw the line,
// and pass those points to the renderer class.
public MyGLsurfaceview( Context context ) {
super( context );
Log.i( "JO", "MyGLsurfaceview1" );
}
public MyGLsurfaceview(
Context context,
AttributeSet attrs )
{
super( context, attrs );
con = context;
mActivity = new MainActivity( );
mActivity.myGlsurfaceView = this;
Log.i( "JO", "MyGLsurfaceview2" );
}
public void setRenderer(
MyRenderer renderer,
float density )
{
Log.i( "JO", "setRenderer" );
myRenderer = renderer;
myDensity = density;
mGestureDetector = new GestureDetector( con, mGestureListener );
super.setRenderer( renderer );
setRenderMode( GLSurfaceView.RENDERMODE_CONTINUOUSLY );
}
@Override public boolean onTouchEvent( MotionEvent ev ) {
boolean retVal = mGestureDetector.onTouchEvent( ev );
if( myline ) {
switch ( ev.getAction( ) ) {
case MotionEvent.ACTION_DOWN:
isLUp = false;
if( count == 1 ) {
dx = ev.getX( );
dy = ev.getY( );
dx = ( dx / ( getWidth( ) / 2 ) ) - 1;
dy = 1 - ( dy / ( getHeight( ) / 2 ) );
firstX = dx;
firstY = dy;
} else if( count == 2 ) {
ux = ev.getX( );
uy = ev.getY( );
ux = ( ux / ( getWidth( ) / 2 ) ) - 1;
uy = 1 - ( uy / ( getHeight( ) / 2 ) );
secondX = ux;
secondY = uy;
myRenderer.dx = firstX;
myRenderer.dy = firstY;
myRenderer.ux = secondX;
myRenderer.uy = secondY;
midX = ( firstX + secondX ) / 2;
midY = ( firstY + secondY ) / 2;
Log.e( "JO",
"Line:firstX" + firstX +
"firstY" + firstY );
lp = new LinePoints( firstX, firstY,
secondX, secondY,
midX, midY );
lineArray.add( lp );
myRenderer.isNewClick = false;
myRenderer.isEnteredAngle = false;
myRenderer.myline = true;
myRenderer.mycircle = false;
myRenderer.mydashedline = false;
myRenderer.eraseCircle = false;
myRenderer.eraseLine = false;
myRenderer.eraseSelCir = false;
myRenderer.angle = angle;
myRenderer.length = length;
requestRender( );
count = 0;
}
count++;
break;
case MotionEvent.ACTION_MOVE:
isLUp = true;
break;
case MotionEvent.ACTION_UP:
if( isLUp ) {
ux = ev.getX( );
uy = ev.getY( );
ux = ( ux / ( getWidth( ) / 2 ) ) - 1;
uy = 1 - ( uy / ( getHeight( ) / 2 ) );
Log.i( "JO", "line2:" + ux + "," + uy );
secondX = ux;
secondY = uy;
myRenderer.dx = firstX;
myRenderer.dy = firstY;
myRenderer.ux = secondX;
myRenderer.uy = secondY;
midX = ( firstX + secondX ) / 2;
midY = ( firstY + secondY ) / 2;
Log.e( "JO",
"Line:firstX" + firstX +
"firstY" + firstY );
lp = new LinePoints( firstX, firstY,
secondX, secondY,
midX, midY );
lineArray.add( lp );
myRenderer.isNewClick = false;
myRenderer.isEnteredAngle = false;
myRenderer.myline = true;
myRenderer.mycircle = false;
myRenderer.mydashedline = false;
myRenderer.mysnaptoedge = false;
myRenderer.mysnaptoMiddle = false;
myRenderer.eraseCircle = false;
myRenderer.eraseLine = false;
myRenderer.eraseSelCir = false;
count = 1;
requestRender( );
}
break;
}
}
return true;
}
}
// Renderer class that renders the line to the GLSurfaceView
Lines line;
public MyRenderer(
MainActivity mainActivity,
MyGLsurfaceview myGlsurfaceView )
{
Log.i( "JO", "MyRenderer" );
this.main = mainActivity;
myGlsurface = myGlsurfaceView;
}
public void onDrawFrame(
GL10 gl )
{
line.draw( dx, dy, ux, uy );
}
@Override public void onSurfaceCreated(
GL10 gl,
EGLConfig config )
{
Log.i( "JO", "onSurfaceCreated" );
// Set the background frame color
GLES20.glClearColor( 0.0f, 0.0f, 0.0f, 1.0f );
// Create the GLText
glText = new GLText( main.getAssets( ) );
// Load the font from file (set size + padding), creates the texture
// NOTE: after a successful call to this the font is ready for
// rendering!
glText.load( "Roboto-Regular.ttf", 14, 2, 2 ); // Create Font (Height: 14 Pixels / X+Y Padding 2 Pixels)
// enable texture + alpha blending
GLES20.glEnable( GLES20.GL_BLEND );
GLES20.glBlendFunc( GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA );
}
@Override public void onSurfaceChanged(
GL10 gl,
int width,
int height )
{
// Adjust the viewport based on geometry changes,
// such as screen rotation
GLES20.glViewport( 0, 0, width, height );
ratio = ( float ) width / height;
width_surface = width;
height_surface = height;
/*
* // this projection matrix is applied to object coordinates // in the
* onDrawFrame() method Matrix.frustumM(mProjMatrix, 0, -ratio, ratio,
* -1, 1, 3, 7);
*/
// Take into account device orientation
if( width > height ) {
Matrix.frustumM( mProjMatrix, 0, -ratio, ratio, -1, 1, 1, 10 );
} else {
Matrix.frustumM( mProjMatrix, 0, -1, 1, -1 / ratio, 1 / ratio,
1, 10 );
}
// Save width and height
this.width = width; // Save Current Width
this.height = height; // Save Current Height
int useForOrtho = Math.min( width, height );
// TODO: Is this wrong?
Matrix.orthoM( mVMatrix, 0, -useForOrtho / 2, useForOrtho / 2,
-useForOrtho / 2, useForOrtho / 2, 0.1f, 100f );
}
// Lines class used to draw a line
public class Lines
{
final String vertexShaderCode = "attribute vec4 vPosition;"
+ "void main() {" + " gl_Position = vPosition;" + "}";
final String fragmentShaderCode = "precision mediump float;"
+ "uniform vec4 vColor;" + "void main() {"
+ " gl_FragColor = vColor;" + "}";
final FloatBuffer vertexBuffer;
final int mProgram;
int mPositionHandle;
int mColorHandle;
// number of coordinates per vertex in this array
final int COORDS_PER_VERTEX = 3;
float lineCoords[] = new float[6];
final int vertexCount = lineCoords.length / COORDS_PER_VERTEX;
final int vertexStride = COORDS_PER_VERTEX * 4; // bytes per vertex
// Set color with red, green, blue and alpha (opacity) values
float lcolor[] = { 1.0f, 1.0f, 1.0f, 1.0f };
public Lines(
)
{
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
        // (number of coordinate values * 4 bytes per float)
        lineCoords.length * 4 );
// use the device hardware's native byte order
bb.order( ByteOrder.nativeOrder( ) );
// create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer( );
// prepare shaders and OpenGL program
int vertexShader =
MyRenderer.loadShader( GLES20.GL_VERTEX_SHADER,
vertexShaderCode );
int fragmentShader =
MyRenderer.loadShader( GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode );
mProgram = GLES20.glCreateProgram( ); // create empty OpenGL Program
GLES20.glAttachShader( mProgram, vertexShader ); // add the vertex shader
// to program
GLES20.glAttachShader( mProgram, fragmentShader ); // add the fragment
// shader to program
GLES20.glLinkProgram( mProgram ); // create OpenGL program executables
}
public void draw(
float dX,
float dY,
float uX,
float uY )
{
lineCoords[0] = dX;
lineCoords[1] = dY;
lineCoords[2] = 0.0f;
lineCoords[3] = uX;
lineCoords[4] = uY;
lineCoords[5] = 0.0f;
Log.i( "JO",
"lineCoords:" + lineCoords[0] + "," + lineCoords[1] +
"," + lineCoords[3] + "," + lineCoords[4] );
vertexBuffer.put( lineCoords );
vertexBuffer.position( 0 );
// Add program to OpenGL environment
GLES20.glUseProgram( mProgram );
// get handle to vertex shader's vPosition member
mPositionHandle =
GLES20.glGetAttribLocation( mProgram, "vPosition" );
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray( mPositionHandle );
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer( mPositionHandle,
COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer );
// get handle to fragment shader's vColor member
mColorHandle =
GLES20.glGetUniformLocation( mProgram, "vColor" );
// Set color for drawing the triangle
GLES20.glUniform4fv( mColorHandle, 1, lcolor, 0 );
GLES20.glLineWidth( 3 );
// Draw the triangle
GLES20.glDrawArrays( GLES20.GL_LINES, 0, vertexCount );
// Disable vertex array
GLES20.glDisableVertexAttribArray( mPositionHandle );
}
}
Upvotes: 6
Views: 2071
Reputation: 8892
Crossposted from my answer to a similar question, "Why my opengl output differs for various devices?":
Should we take into account the GPU while coding? No way, the OpenGL API is a layer between your application and the hardware.
This is largely correct for desktop graphics, since all desktop GPUs are immediate-mode renderers; however, this is NOT the case in mobile graphics.
The Mali GPUs use tile-based immediate-mode rendering. For this type of rendering, the framebuffer is divided into tiles of size 16 by 16 pixels. The Polygon List Builder (PLB) organizes input data from the application into polygon lists. There is a polygon list for each tile. When a primitive covers part of a tile, an entry, called a polygon list command, is added to the polygon list for the tile. The pixel processor takes the polygon list for one tile and computes values for all pixels in that tile before starting work on the next tile. Because this tile-based approach uses a fast, on-chip tile buffer, the GPU only writes the tile buffer contents to the framebuffer in main memory at the end of each tile. Non-tiled-based, immediate-mode renderers generally require many more framebuffer accesses. The tile-based method therefore consumes less memory bandwidth, and supports operations such as depth testing, blending and anti-aliasing efficiently.
Another difference is the treatment of rendered buffers. Immediate renderers will "save" the content of your buffer, effectively allowing you to only draw differences in the rendered scene on top of what previously existed. This IS available in Mali, however, is not enabled by default as it can cause undesired effects if used incorrectly.
There is an example in the Mali GLES2 SDK showing how to use "EGL Preserve" correctly, available in the GLES2 SDK here.
The reason the GeForce ULP-based Nexus 7 works as intended is that, as an immediate-mode renderer, it defaults to preserving the buffers, whereas Mali does not.
From the Khronos EGL specification:
EGL_SWAP_BEHAVIOR
Specifies the effect on the color buffer of posting a surface with eglSwapBuffers. A value of EGL_BUFFER_PRESERVED indicates that color buffer contents are unaffected, while EGL_BUFFER_DESTROYED indicates that color buffer contents may be destroyed or changed by the operation.
The initial value of EGL_SWAP_BEHAVIOR is chosen by the implementation.
The default value of EGL_SWAP_BEHAVIOR on the Mali platform is EGL_BUFFER_DESTROYED. This is due to the performance hit of fetching the previous buffer from memory before rendering the new frame and storing it again at the end, as well as the extra bandwidth consumed (which is also very bad for battery life on mobile devices). I cannot comment with certainty on the default behavior of the Tegra SoCs; however, it is apparent to me that their default is EGL_BUFFER_PRESERVED.
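If you do want the previous frame's contents to survive eglSwapBuffers, one option on API 17+ is to request EGL_BUFFER_PRESERVED on the current surface. A minimal sketch (not the questioner's code, and the EGL implementation is free to refuse the request):
// Sketch only: uses android.opengl.EGL14 (API level 17+). Must run on the GL thread
// after the EGL surface exists; check the return value, since not every device supports it.
boolean preserved = EGL14.eglSurfaceAttrib(
        EGL14.eglGetCurrentDisplay( ),
        EGL14.eglGetCurrentSurface( EGL14.EGL_DRAW ),
        EGL14.EGL_SWAP_BEHAVIOR,
        EGL14.EGL_BUFFER_PRESERVED );
Keep in mind the bandwidth and battery cost described above; redrawing the whole scene each frame is usually the better fix.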
To clarify Mali's position with regards to the Khronos GLES specifications - Mali is fully compliant.
Upvotes: 0
Reputation: 889
Why don't you provide a minimal working example, so people can actually help?
From your code I can't see where you create your line. Something like:
@Override public void onSurfaceCreated(GL10 gl, EGLConfig config){
...
mLine = new Lines();
...
}
As others already mentioned, always clear the buffer in onDrawFrame:
public void onDrawFrame(GL10 gl )
{
// Erase GL_COLOR_BUFFER
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
Set the camera:
// Set the camera position (View matrix)
Matrix.setLookAtM(mViewMatrix, 0, 0, 0, 3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
//
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mViewMatrix, 0);
Draw:
line.draw( dx, dy, ux, uy );
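Putting those pieces together, onDrawFrame could look roughly like this (a sketch only; mViewMatrix, mProjMatrix, mMVPMatrix and line are assumed to be fields of your renderer, with line created in onSurfaceCreated as above):
@Override
public void onDrawFrame( GL10 gl ) {
    // Clear the color (and depth) buffer at the start of every frame
    GLES20.glClear( GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT );
    // Set the camera position (View matrix) and combine it with the projection
    Matrix.setLookAtM( mViewMatrix, 0, 0, 0, 3, 0f, 0f, 0f, 0f, 1.0f, 0.0f );
    Matrix.multiplyMM( mMVPMatrix, 0, mProjMatrix, 0, mViewMatrix, 0 );
    // Draw the line; note the question's shader does not yet use the MVP matrix,
    // so it would have to be passed in as a uniform to have any effect
    line.draw( dx, dy, ux, uy );
}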
Upvotes: 0
Reputation: 162164
Okay, here it goes again: [1]
OpenGL is not a scene graph. OpenGL does not maintain a scene, know about objects or keep track of geometry. OpenGL is a drawing API. You give it a canvas (in the form of a window or a PBuffer) and order it to draw points, lines or triangles, and OpenGL does exactly that. Once a primitive (= point, line, triangle) has been drawn, OpenGL has no recollection of it whatsoever. If something changes, you have to redraw the whole thing.
The proper steps to redraw a scene are:
1. Disable the stencil test, so that the following step operates on the whole window.
2. Clear the framebuffer using glClear(bits), where bits is a bitmask specifying which parts of the canvas to clear. When rendering a new frame you want to clear everything, so bits = GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT.
3. Set the viewport and build an appropriate projection matrix.
4. For each object in the scene, load the right modelview matrix, set uniforms, select the vertex arrays and make the drawing call.
5. Finish the rendering by flushing the pipeline. If using a single-buffered window call glFinish(); if using a double-buffered window call SwapBuffers. In the case of higher-level frameworks this may be performed by the framework.
Important: Once drawing has finished on a double-buffered window, you must not continue to send drawing operations, because after the buffer swap the contents of the back buffer you're drawing to are undefined. You must therefore start the drawing anew, beginning with clearing the framebuffer (steps 1 and 2).
What your code misses are exactly those two steps. I also have the impression that you're performing OpenGL drawing calls in direct reaction to input events, possibly in the input event handlers themselves. Don't do this! Instead, use the input events to add to a list of primitives (lines in your case) to draw, then send a redraw event, which makes the framework call the drawing function. In the drawing function, iterate over that list to draw the desired lines, as in the sketch below.
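A minimal sketch of that pattern, reusing the lineArray, LinePoints and Lines names from the question (the LinePoints field names shown here are assumptions, not the questioner's actual fields):
// In the touch handler: only record the primitive and request a redraw.
lineArray.add( new LinePoints( firstX, firstY, secondX, secondY, midX, midY ) );
requestRender( );
// In the renderer: redraw the whole scene, every frame.
@Override
public void onDrawFrame( GL10 gl ) {
    GLES20.glClear( GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT );
    for ( LinePoints p : lineArray ) {
        line.draw( p.x1, p.y1, p.x2, p.y2 ); // assumed field names on LinePoints
    }
}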
Redrawing the whole scene is canonical in OpenGL!
[1] (geesh, I'm getting tired of having to write this every 3rd question or so…)
Upvotes: 4
Reputation: 16582
Taking a punt here, but are you ever actually clearing the screen? The kinds of behaviour you are seeing suggest that you are not, and that in different scenarios you are seeing different errors: uninitialised memory, reuse of an old buffer, implicit clearing, etc.
GL requires you to be specific about what you want, so you need to explicitly clear.
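For example, a minimal sketch of an explicit clear at the start of every frame:
@Override
public void onDrawFrame( GL10 gl ) {
    GLES20.glClearColor( 0.0f, 0.0f, 0.0f, 1.0f ); // colour the buffer is cleared to
    GLES20.glClear( GLES20.GL_COLOR_BUFFER_BIT );  // explicitly clear it every frame
    // ... draw the scene ...
}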
Upvotes: 3
Reputation: 2730
OpenGL is just a standard. The actual implementation of the API is up to the graphics card manufacturer, so yes, OpenGL development can sometimes be GPU dependent. However, all implementations should produce the same result (even though what happens behind the scenes can be very different). If your code gives different results on different GPUs, there is probably a version difference between the OpenGL implementations.
You can use these functions to get the supported OpenGL version:
glGetIntegerv(GL_MAJOR_VERSION, *); //version 3.0+
glGetIntegerv(GL_MINOR_VERSION, *); //version 3.0+
glGetString(GL_VERSION); //all versions
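On Android with OpenGL ES 2.0, GL_MAJOR_VERSION and GL_MINOR_VERSION are not available, so a sketch of the equivalent check queries the version and renderer strings on the GL thread (e.g. in onSurfaceCreated):
String version  = GLES20.glGetString( GLES20.GL_VERSION );  // e.g. "OpenGL ES 2.0 ..."
String renderer = GLES20.glGetString( GLES20.GL_RENDERER ); // e.g. "Mali-400 MP"
String vendor   = GLES20.glGetString( GLES20.GL_VENDOR );
Log.i( "GL", "version=" + version + " renderer=" + renderer + " vendor=" + vendor );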
Upvotes: 1