#include "SDL/SDL.h"

#include <GLES2/gl2.h>

#include <stdio.h>
#include <string.h>

int result = 1; // 1 = success; any failed assertion below sets this to 0.
#define assert(x) do { if (!(x)) {result = 0; printf("Assertion failure: %s in %s:%d!\n", #x, __FILE__, __LINE__); } } while(0)

int main(int argc, char *argv[])
{
    SDL_Surface *screen;

    // Initialize SDL; only the video subsystem is needed to get a GL context.
    if ( SDL_Init(SDL_INIT_VIDEO) != 0 ) {
        printf("Unable to initialize SDL: %s\n", SDL_GetError());
        return 1;
    }
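
    // Illustrative addition (not part of the original test): GL context
    // attributes, e.g. the depth buffer size, can be requested with
    // SDL_GL_SetAttribute before SDL_SetVideoMode is called.
    SDL_GL_SetAttribute( SDL_GL_DEPTH_SIZE, 16 );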

    screen = SDL_SetVideoMode( 640, 480, 16, SDL_OPENGL ); // Request a GL-capable 640x480 mode.
    if ( !screen ) {
        printf("Unable to set video mode: %s\n", SDL_GetError());
        return 1;
    }

    // Test that code calling the GLES2 binary shader API compiles and runs
    // (the calls are nonfunctional no-ops, since WebGL has no binary shaders).
    GLuint vs = glCreateShader(GL_VERTEX_SHADER);
    glShaderBinary(1, &vs, 0, 0, 0);
    assert(glGetError() != GL_NO_ERROR);
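
    // For contrast, a sketch that is not part of the original checks: the
    // source-based path is what WebGL actually supports, so compiling a
    // trivial shader from source should raise no GL error. The shader text
    // here is illustrative only.
    const char *vsSource = "void main() { gl_Position = vec4(0.0); }";
    glShaderSource(vs, 1, &vsSource, 0);
    glCompileShader(vs);
    assert(glGetError() == GL_NO_ERROR);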

    // Calling any of the glGet*() variants with a null pointer should be detected and must not crash.
    // Native code may crash when passed a null pointer, and the GL spec leaves the case undefined,
    // so we specify that Emscripten's GLES2 layer generates GL_INVALID_VALUE instead.
    glGetBooleanv(GL_ACTIVE_TEXTURE, 0);
    assert(glGetError() == GL_INVALID_VALUE);
    glGetIntegerv(GL_ACTIVE_TEXTURE, 0);
    assert(glGetError() == GL_INVALID_VALUE);
    glGetFloatv(GL_ACTIVE_TEXTURE, 0);
    assert(glGetError() == GL_INVALID_VALUE);
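
    // Sanity check with a valid pointer (an illustrative addition, relying on
    // GLES2 default state): the initial active texture unit is GL_TEXTURE0.
    GLint activeTex = 0;
    glGetIntegerv(GL_ACTIVE_TEXTURE, &activeTex);
    assert(glGetError() == GL_NO_ERROR);
    assert(activeTex == GL_TEXTURE0);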

    GLboolean b = GL_TRUE;
    GLint i = -1;
    GLfloat f = -1.f;
    glGetBooleanv(GL_NUM_SHADER_BINARY_FORMATS, &b);
    assert(glGetError() == GL_NO_ERROR);
    assert(b == GL_FALSE);
    glGetIntegerv(GL_NUM_SHADER_BINARY_FORMATS, &i);
    assert(glGetError() == GL_NO_ERROR);
    assert(i == 0);
    glGetFloatv(GL_NUM_SHADER_BINARY_FORMATS, &f);
    assert(glGetError() == GL_NO_ERROR);
    assert(f == 0.f);

    // Test that glGetIntegerv(GL_SHADER_BINARY_FORMATS) is a no-op: with zero
    // binary formats available, the output buffer must be left untouched.
    GLint formats[10] = { 123 }; // Sentinel in formats[0]; the rest zero-initialize.
    glGetIntegerv(GL_SHADER_BINARY_FORMATS, formats);
    assert(glGetError() == GL_NO_ERROR);
    assert(formats[0] == 123);

    // Converting an array of enum values to booleans or floats would be
    // ill-defined, so test that these queries report GL_INVALID_ENUM.
    glGetBooleanv(GL_SHADER_BINARY_FORMATS, &b);
    assert(glGetError() == GL_INVALID_ENUM);
    glGetFloatv(GL_SHADER_BINARY_FORMATS, &f);
    assert(glGetError() == GL_INVALID_ENUM);

    // Test that we can query for shader compiler support.
    glGetIntegerv(GL_SHADER_COMPILER, &i);
    assert(glGetError() == GL_NO_ERROR);
    assert(i != 0);
    glGetBooleanv(GL_SHADER_COMPILER, &b);
    assert(glGetError() == GL_NO_ERROR);
    assert(b == GL_TRUE);
    glGetFloatv(GL_SHADER_COMPILER, &f);
    assert(glGetError() == GL_NO_ERROR);
    assert(f == 1.f);
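
    // Hedged addition (not in the original test, assuming the GLES2 layer
    // implements it as a harmless no-op): glReleaseShaderCompiler is only a
    // hint in GLES2, so calling it should succeed without raising an error.
    glReleaseShaderCompiler();
    assert(glGetError() == GL_NO_ERROR);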
    
#ifdef REPORT_RESULT
    REPORT_RESULT();
#endif
    return result ? 0 : 1; // Propagate assertion failures via the exit code.
}