// c++/contourgl/cudacontext.cpp
/*
    Copyright (c) 2018 Ivan Mahonin

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.
*/
383633
383633
#include <cassert>
#include <cstring>

#include <iostream>

#include "cudacontext.h"
383633
383633
383633
using namespace std;
383633
383633
383633
// Build an empty argument pack for cuLaunchKernel's "extra" mechanism.
// The extra array follows the driver API protocol:
//   CU_LAUNCH_PARAM_BUFFER_POINTER, <buffer>,
//   CU_LAUNCH_PARAM_BUFFER_SIZE,    <&size>,
//   CU_LAUNCH_PARAM_END.
// The buffer pointer slot ([1]) starts NULL and is patched by add().
CudaParams::CudaParams(): params_buffer_size() {
	params_extra.push_back(CU_LAUNCH_PARAM_BUFFER_POINTER);
	params_extra.push_back(NULL);
	params_extra.push_back(CU_LAUNCH_PARAM_BUFFER_SIZE);
	// was HTML-mangled to "¶ms_buffer_size" in the scraped source
	params_extra.push_back(&params_buffer_size);
	params_extra.push_back(CU_LAUNCH_PARAM_END);
}
383633
383633
void CudaParams::reset() {
383633
	params_buffer.clear();
383633
	params_offsets.clear();
383633
	params_pointers.clear();
383633
	params_extra.clear();
383633
	params_buffer_size = 0;
383633
}
383633
383633
// Append one kernel argument (raw bytes) to the parameter buffer.
//   data:  pointer to the argument value
//   size:  its size in bytes
//   align: required alignment of the argument inside the buffer (> 0)
// Returns *this so calls can be chained. The three "&" below were
// HTML-mangled to "¶ms_…" in the scraped source.
CudaParams& CudaParams::add(const void* data, int size, int align) {
	assert(align > 0);

	// Next write position: current end of buffer rounded up to 'align'.
	int index = params_buffer.empty() ? 0 : ((params_buffer.size() - 1)/align + 1)*align;
	params_buffer.resize(index + size);
	memcpy(&params_buffer[index], data, size);
	params_buffer_size = params_buffer.size();

	params_offsets.push_back(index);

	// resize() may have reallocated the buffer. The first cached pointer
	// always corresponds to offset 0, so if it no longer equals the buffer
	// base, every cached pointer is stale and is recomputed from offsets.
	char *root = &params_buffer.front();
	params_pointers.push_back(root + index);
	if (params_pointers.front() != root) {
		params_pointers.clear();
		for(std::vector<int>::iterator i = params_offsets.begin(); i != params_offsets.end(); ++i)
			params_pointers.push_back(root + *i);
	}
	// Keep the cuLaunchKernel "extra" array pointing at the live buffer.
	params_extra[1] = root;

	return *this;
}
383633
383633
383633
// Bring up the CUDA driver API: initialize it, pick the first device,
// query its name (logging is currently disabled) and create a scheduling
// context on it. Errors abort via assert, matching the rest of this file.
CudaContext::CudaContext():
	device(),
	context(),
	err()
{
	const int device_index = 0;

	err = cuInit(0);
	assert(!err);

	err = cuDeviceGet(&device, device_index);
	assert(!err);

	char device_name[1024] = {};
	err = cuDeviceGetName(device_name, sizeof(device_name), device);
	assert(!err);
	//cout << "CUDA device " << device_index << ": " << device_name << endl;

	err = cuCtxCreate(&context, CU_CTX_SCHED_AUTO, device);
	assert(!err);

	//hello();
}
383633
383633
// Tear down the driver context created in the constructor.
// The cuCtxDestroy return code is deliberately ignored: there is nothing
// useful to do about a failure during destruction.
CudaContext::~CudaContext() {
	cuCtxDestroy(context);
}
383633
383633
void CudaContext::hello() {
383633
	CUmodule module;
383633
	err = cuModuleLoad(&module, "cuda/hello.ptx");
383633
	assert(!err);
383633
383633
	CUfunction kernel;
383633
	err = cuModuleGetFunction(&kernel, module, "hello");
383633
	assert(!err);
383633
383633
	char data[] = "......";
383633
383633
	CUdeviceptr buffer;
383633
	err = cuMemAlloc(&buffer, sizeof(data));
383633
383633
	CudaParams params;
383633
	params.add(buffer);
383633
383633
	err = cuLaunchKernel(
383633
		kernel,
383633
		1, 1, 1,
383633
		sizeof(data), 1, 1,
383633
		0, 0, 0,
383633
		params.get_extra() );
383633
	assert(!err);
383633
383633
	err = cuStreamSynchronize(0);
383633
	assert(!err);
383633
383633
	err = cuMemcpyDtoH(data, buffer, sizeof(data));
383633
	assert(!err);
383633
383633
	err = cuMemFree(buffer);
383633
	assert(!err);
383633
383633
	err = cuModuleUnload(module);
383633
	assert(!err);
383633
383633
	cout << data << endl;
383633
}