Move buffer binding assignment to javascript

John Alanbrook 2024-05-09 20:37:11 -05:00
parent 22c38fe481
commit d43d9d8fe3
6 changed files with 100 additions and 61 deletions

(file 1 of 6)

@@ -544,6 +544,7 @@ int mat2type(int mat)
   switch(mat) {
     case MAT_POS:
       return SG_VERTEXFORMAT_FLOAT3;
+    case MAT_PPOS:
     case MAT_WH:
     case MAT_ST:
       return SG_VERTEXFORMAT_FLOAT2;
@@ -558,11 +559,26 @@ int mat2type(int mat)
     case MAT_COLOR:
       return SG_VERTEXFORMAT_UBYTE4N;
     case MAT_ANGLE:
+    case MAT_SCALE:
      return SG_VERTEXFORMAT_FLOAT;
   };
   return 0;
 }
 
+int mat2step(int mat)
+{
+  switch(mat) {
+    case MAT_POS:
+    case MAT_UV:
+    case MAT_TAN:
+    case MAT_NORM:
+    case MAT_BONE:
+    case MAT_WEIGHT:
+      return SG_VERTEXSTEP_PER_VERTEX;
+  };
+  return SG_VERTEXSTEP_PER_INSTANCE;
+}
+
 sg_buffer mat2buffer(int mat, primitive *p)
 {
   switch(mat) {
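
Aside (not part of the diff): mat2type maps a MAT_* attribute id to a sokol vertex format, and the new mat2step marks the geometry attributes as per-vertex while everything else is treated as per-instance data. A minimal sketch of how the two could feed a pipeline's vertex layout, assuming a hypothetical mats[]/nmats attribute list and one buffer slot per attribute:

    #include "sokol/sokol_gfx.h"

    int mat2type(int mat);   /* defined above in this file */
    int mat2step(int mat);

    /* Hypothetical helper: one vertex-buffer slot per MAT_* attribute. */
    static void layout_from_mats(sg_pipeline_desc *desc, const int *mats, int nmats)
    {
      for (int i = 0; i < nmats; i++) {
        desc->layout.attrs[i].format = (sg_vertex_format)mat2type(mats[i]);
        desc->layout.attrs[i].buffer_index = i;
        desc->layout.buffers[i].step_func = (sg_vertex_step)mat2step(mats[i]);
      }
    }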

(file 2 of 6)

@@ -18,6 +18,8 @@
 #define MAT_ANGLE 7
 #define MAT_WH 8
 #define MAT_ST 9
+#define MAT_PPOS 10
+#define MAT_SCALE 11
 
 typedef struct material {
   struct texture *diffuse;

(file 3 of 6)

@@ -17,9 +17,12 @@
 #include "sokol/sokol_gfx.h"
 
-sg_shader vid_shader;
-sg_pipeline vid_pipeline;
-sg_bindings vid_bind;
+void datastream_free(datastream *ds)
+{
+  sg_destroy_image(ds->img);
+  plm_destroy(ds->plm);
+  free(ds);
+}
 
 void soundstream_fillbuf(struct datastream *ds, soundbyte *buf, int frames) {
   for (int i = 0; i < frames*CHANNELS; i++)
@@ -27,17 +30,14 @@ void soundstream_fillbuf(struct datastream *ds, soundbyte *buf, int frames) {
 }
 
 static void render_frame(plm_t *mpeg, plm_frame_t *frame, struct datastream *ds) {
-  return;
+  if (ds->dirty) return;
   uint8_t rgb[frame->height*frame->width*4];
-  memset(rgb,255,frame->height*frame->width*4);
   plm_frame_to_rgba(frame, rgb, frame->width*4);
-  sg_image_data imgd;
-  sg_range ir = {
-    .ptr = rgb,
-    .size = frame->height*frame->width*4*sizeof(uint8_t)
-  };
-  imgd.subimage[0][0] = ir;
+  sg_image_data imgd = {0};
+  imgd.subimage[0][0] = SG_RANGE(rgb);
   sg_update_image(ds->img, &imgd);
+  ds->dirty = true;
 }
 
 static void render_audio(plm_t *mpeg, plm_samples_t *samples, struct datastream *ds) {
@@ -65,16 +65,21 @@ struct datastream *ds_openvideo(const char *path)
     plm_get_num_audio_streams(ds->plm),
     plm_get_duration(ds->plm));
 
   ds->img = sg_make_image(&(sg_image_desc){
     .width = plm_get_width(ds->plm),
-    .height = plm_get_height(ds->plm)
+    .height = plm_get_height(ds->plm),
+    .usage = SG_USAGE_STREAM,
+    .type = SG_IMAGETYPE_2D,
+    .pixel_format = SG_PIXELFORMAT_RGBA8,
   });
 
+  plm_set_video_decode_callback(ds->plm, render_frame, ds);
+
+  return ds;
+
   ds->ring = ringnew(ds->ring, 8192);
   plugin_node(make_node(ds, soundstream_fillbuf, NULL), masterbus);
 
-  plm_set_video_decode_callback(ds->plm, render_frame, ds);
   plm_set_audio_decode_callback(ds->plm, render_audio, ds);
 
   plm_set_loop(ds->plm, false);
@@ -84,13 +89,12 @@ struct datastream *ds_openvideo(const char *path)
   // Adjust the audio lead time according to the audio_spec buffer size
   plm_set_audio_lead_time(ds->plm, BUF_FRAMES / SAMPLERATE);
 
-  ds->playing = true;
-
   return ds;
 }
 
 void ds_advance(struct datastream *ds, double s) {
-  if (ds->playing) plm_decode(ds->plm, s);
+  ds->dirty = false;
+  plm_decode(ds->plm, s);
 }
 
 void ds_seek(struct datastream *ds, double time) {
@@ -104,31 +108,6 @@ void ds_advanceframes(struct datastream *ds, int frames) {
   }
 }
 
-void ds_pause(struct datastream *ds) {
-  ds->playing = false;
-}
-
-void ds_stop(struct datastream *ds) {
-  if (ds->plm != NULL) {
-    plm_destroy(ds->plm);
-    ds->plm = NULL;
-  }
-  ds->playing = false;
-}
-
-// TODO: Must be a better way
-int ds_videodone(struct datastream *ds) {
-  return (ds->plm == NULL) || plm_get_time(ds->plm) >= plm_get_duration(ds->plm);
-}
-
-double ds_remainingtime(struct datastream *ds) {
-  if (ds->plm != NULL)
-    return plm_get_duration(ds->plm) - plm_get_time(ds->plm);
-  else
-    return 0.f;
-}
-
 double ds_length(struct datastream *ds) {
   return plm_get_duration(ds->plm);
 }
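
Aside (not part of the diff): with the playing flag gone, ds_advance always decodes. It clears ds->dirty first; render_frame skips the upload once dirty is set and re-sets it after sg_update_image, so each advance refreshes the streaming image at most once. A minimal sketch of the assumed per-frame driver, with the draw half left abstract:

    /* Hypothetical per-frame driver for a datastream ds. */
    static void video_tick(struct datastream *ds, double dt)
    {
      ds_advance(ds, dt);   /* clears ds->dirty, then plm_decode() -> render_frame()
                               updates ds->img at most once for this tick */
      /* ds->img can now be bound as this frame's video texture by the renderer */
    }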

(file 4 of 6)

@@ -11,16 +11,22 @@ struct soundstream;
 struct datastream {
   plm_t *plm;
-  double last_time;
-  int playing;
   sg_image img;
+  sg_image y;
+  sg_image cr;
+  sg_image cb;
   int width;
   int height;
+  int dirty;
   soundbyte *ring;
 };
 
+typedef struct datastream datastream;
+
 struct texture;
 
+void datastream_free(datastream *ds);
 struct datastream *ds_openvideo(const char *path);
 struct texture *ds_maketexture(struct datastream *);
 void ds_advance(struct datastream *ds, double);

(file 5 of 6)

@@ -83,6 +83,7 @@ QJSCLASS(window)
 QJSCLASS(constraint)
 QJSCLASS(primitive)
 QJSCLASS(sg_buffer)
+QJSCLASS(datastream)
 
 static JSValue sound_proto;
 sound *js2sound(JSValue v) { return js2dsp_node(v)->data; }
@@ -590,18 +591,23 @@ static const JSCFunctionListEntry js_warp_damp_funcs [] = {
   CGETSET_ADD(warp_damp, damp)
 };
 
-sg_bindings js2bind(JSValue mat, JSValue prim)
+sg_bindings js2bind(JSValue v)
 {
   sg_bindings bind = {0};
-  for (int i = 0; i < js_arrlen(mat); i++) {
-    bind.fs.images[i] = js2texture(js_getpropidx(mat, i))->id;
+  JSValue attrib = js_getpropstr(v, "attrib");
+  for (int i = 0; i < js_arrlen(attrib); i++)
+    bind.vertex_buffers[i] = *js2sg_buffer(js_getpropidx(attrib,i));
+
+  JSValue index = js_getpropstr(v, "index");
+  if (!JS_IsUndefined(index))
+    bind.index_buffer = *js2sg_buffer(index);
+
+  JSValue imgs = js_getpropstr(v, "images");
+  for (int i = 0; i < js_arrlen(imgs); i++) {
+    bind.fs.images[i] = js2texture(js_getpropidx(imgs, i))->id;
     bind.fs.samplers[i] = std_sampler;
   }
-  bind.vertex_buffers[0] = *js2sg_buffer(js_getpropstr(prim, "pos"));
-  bind.index_buffer = *js2sg_buffer(js_getpropstr(prim, "index"));
-  bind.vertex_buffers[1] = *js2sg_buffer(js_getpropstr(prim, "uv"));
 
   return bind;
 }
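
Aside (not part of the diff): js2bind now takes a single JS object instead of a (material, primitive) pair. Judging from the property reads above and the callers below, that object is expected to look roughly like { attrib: [vertex buffers...], index: buffer, images: [textures...], count: n }, with "count" read by the draw calls. A minimal C-side sketch of the consumption pattern, with draw_bind_object as a hypothetical helper mirroring render_spdraw:

    /* Hypothetical helper: apply a JS-built binding object and draw "count" elements. */
    static void draw_bind_object(JSValue bindobj)
    {
      sg_bindings bind = js2bind(bindobj);
      sg_apply_bindings(&bind);
      int n = js2number(js_getpropstr(bindobj, "count"));
      sg_draw(0, n, 1);
    }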
@@ -624,7 +630,7 @@ JSC_GETSET(emitter, persist_var, number)
 JSC_GETSET(emitter, warp_mask, bitmask)
 JSC_CCALL(emitter_emit, emitter_emit(js2emitter(this), js2number(argv[0]), js2transform2d(argv[1])))
 JSC_CCALL(emitter_step, emitter_step(js2emitter(this), js2number(argv[0]), js2transform2d(argv[1])))
-JSC_CCALL(emitter_draw, emitter_draw(js2emitter(this), js2bind(argv[0], argv[1])))
+JSC_CCALL(emitter_draw, emitter_draw(js2emitter(this), js2bind(argv[0])))
 
 JSC_CCALL(render_flushtext, text_flush())
@@ -664,11 +670,11 @@ JSC_CCALL(render_end_pass,
   }
   p.id = js2number(argv[0]);
   sg_apply_pipeline(p);
-  sg_bindings bind = js2bind(argv[1], argv[2]);
+  sg_bindings bind = js2bind(argv[1]);
   bind.fs.images[0] = screencolor;
   bind.fs.samplers[0] = std_sampler;
   sg_apply_bindings(&bind);
-  int c = js2number(js_getpropstr(argv[2], "count"));
+  int c = js2number(js_getpropstr(argv[1], "count"));
   sg_draw(0,c,1);
   sg_end_pass();
@@ -730,8 +736,9 @@ sg_shader js2shader(JSValue v)
   JSValue pairs = js_getpropstr(fs, "image_sampler_pairs");
   unin = js_arrlen(pairs);
   for (int i = 0; i < unin; i++) {
+    JSValue pair = js_getpropidx(pairs, i);
     desc.fs.image_sampler_pairs[0].used = true;
-    desc.fs.image_sampler_pairs[0].image_slot = 0;
+    desc.fs.image_sampler_pairs[0].image_slot = js2number(js_getpropstr(pair, "slot"));
     desc.fs.image_sampler_pairs[0].sampler_slot = 0;
   }
@@ -822,9 +829,9 @@ JSC_CCALL(render_setunim4,
 );
 
 JSC_CCALL(render_spdraw,
-  sg_bindings bind = js2bind(argv[0], argv[1]);
+  sg_bindings bind = js2bind(argv[0]);
   sg_apply_bindings(&bind);
-  int p = js2number(js_getpropstr(argv[1], "count"));
+  int p = js2number(js_getpropstr(argv[0], "count"));
   sg_draw(0,p,1);
 )
@@ -1549,6 +1556,27 @@ static const JSCFunctionListEntry js_dspsound_funcs[] = {
   MIST_FUNC_DEF(dspsound, mod, 1)
 };
 
+JSC_CCALL(datastream_time, return number2js(plm_get_time(js2datastream(this)->plm)); )
+JSC_CCALL(datastream_advance_frames, ds_advanceframes(js2datastream(this), js2number(argv[0])))
+JSC_CCALL(datastream_seek, ds_seek(js2datastream(this), js2number(argv[0])))
+JSC_CCALL(datastream_advance, ds_advance(js2datastream(this), js2number(argv[0])))
+JSC_CCALL(datastream_duration, return number2js(ds_length(js2datastream(this))))
+JSC_CCALL(datastream_framerate, return number2js(plm_get_framerate(js2datastream(this)->plm)))
+
+static const JSCFunctionListEntry js_datastream_funcs[] = {
+  MIST_FUNC_DEF(datastream, time, 0),
+  MIST_FUNC_DEF(datastream, advance_frames, 1),
+  MIST_FUNC_DEF(datastream, seek, 1),
+  MIST_FUNC_DEF(datastream, advance, 1),
+  MIST_FUNC_DEF(datastream, duration, 0),
+  MIST_FUNC_DEF(datastream, framerate, 0),
+};
+
 JSC_CCALL(pshape_set_sensor, shape_set_sensor(js2ptr(argv[0]), js2boolean(argv[1])))
 JSC_CCALL(pshape_get_sensor, return boolean2js(shape_get_sensor(js2ptr(argv[0]))))
 JSC_CCALL(pshape_set_enabled, shape_enabled(js2ptr(argv[0]), js2boolean(argv[1])))
@@ -1846,7 +1874,6 @@ JSC_CCALL(os_make_line_prim,
   *idx = par_idx_buffer(m->triangle_indices, m->num_triangles*3);
   js_setpropstr(prim, "index", sg_buffer2js(idx));
 
-  printf("there are %d verts\n", m->num_vertices);
   float uv[m->num_vertices*2];
   for (int i = 0; i < m->num_vertices; i++) {
     uv[i*2] = m->annotations[i].u_along_curve;
@@ -1930,6 +1957,14 @@ JSC_CCALL(os_make_plane,
   return parmesh2js(par_shapes_create_plane(js2number(argv[0]), js2number(argv[1])));
 )
 
+JSC_SCALL(os_make_video,
+  datastream *ds = ds_openvideo(str);
+  ret = datastream2js(ds);
+  texture *t = malloc(sizeof(texture));
+  t->id = ds->img;
+  js_setpropstr(ret, "texture", texture2js(t));
+)
+
 static const JSCFunctionListEntry js_os_funcs[] = {
   MIST_FUNC_DEF(os, cwd, 0),
   MIST_FUNC_DEF(os, env, 1),
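
Aside (not part of the diff): os_make_video returns the datastream object and hangs a "texture" property off it that wraps the stream's GPU image, so script code can drop it straight into the "images" array that js2bind reads. A minimal C-side sketch of that relationship, assuming a JSValue video_obj returned by os_make_video:

    /* Hypothetical: bind the video's texture for a draw. */
    static void bind_video_texture(JSValue video_obj)
    {
      texture *t = js2texture(js_getpropstr(video_obj, "texture"));
      sg_bindings bind = {0};
      bind.fs.images[0] = t->id;          /* == ds->img, refreshed by render_frame() */
      bind.fs.samplers[0] = std_sampler;
      sg_apply_bindings(&bind);
    }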
@@ -1959,7 +1994,8 @@ static const JSCFunctionListEntry js_os_funcs[] = {
   MIST_FUNC_DEF(os, make_klein_bottle, 2),
   MIST_FUNC_DEF(os, make_trefoil_knot, 3),
   MIST_FUNC_DEF(os, make_hemisphere, 2),
-  MIST_FUNC_DEF(os, make_plane, 2)
+  MIST_FUNC_DEF(os, make_plane, 2),
+  MIST_FUNC_DEF(os, make_video, 1),
 };
 
 #include "steam.h"
@@ -1983,6 +2019,7 @@ void ffi_load() {
   QJSCLASSPREP_FUNCS(constraint);
   QJSCLASSPREP_FUNCS(window);
   QJSCLASSPREP_FUNCS(model);
+  QJSCLASSPREP_FUNCS(datastream);
 
   QJSGLOBALCLASS(nota);
   QJSGLOBALCLASS(input);

(file 6 of 6)

@@ -185,7 +185,6 @@ HMM_Mat4 projection = {0.f};
 HMM_Mat4 hudproj = {0.f};
 HMM_Mat4 useproj = {0};
 
 void openglRender(struct window *window, transform2d *cam, float zoom) {
-
   HMM_Vec2 usesize = mainwin.rendersize;
   if (mainwin.mode == MODE_FULL)