I want to convert a std::vector<unsigned char> to a std::string. Unfortunately, the application crashes. Can you help me?
Edit: The app only crashes when it is linked with the -lGL switch. Without it, the app works fine.
Edit2: Problem solved; see my last post.
Example code:
Code:
#include <vector>
#include <string>
#include <iostream>

int main()
{
    std::vector<unsigned char> v;
    v.push_back(0x46); // 'F'
    v.push_back(0x4F); // 'O'
    v.push_back(0x4F); // 'O'
    v.push_back(0x00); // trailing null byte (copied into the string as well)

    // Construct the string from the byte range
    std::string s(v.begin(), v.end());
    std::cout << s << std::endl;
    return 0;
}
Program received signal SIGSEGV, Segmentation fault.
0x0000000000000000 in ?? ()
(gdb) where
#0 0x0000000000000000 in ?? ()
#1 0x00007ffff374e291 in init () at dlerror.c:177
#2 0x00007ffff374e687 in _dlerror_run (
operate=operate@entry=0x7ffff374e130 <dlsym_doit>,
args=args@entry=0x7fffffffe210) at dlerror.c:129
#3 0x00007ffff374e198 in __dlsym (handle=<optimized out>,
name=<optimized out>) at dlsym.c:70
#4 0x00007ffff7b3b69e in ?? () from /usr/lib/x86_64-linux-gnu/libGL.so.1
#5 0x00007ffff7b1f556 in ?? () from /usr/lib/x86_64-linux-gnu/libGL.so.1
#6 0x00007ffff7dea97d in call_init (l=0x7ffff7ff74c8, argc=argc@entry=1,
argv=argv@entry=0x7fffffffe358, env=env@entry=0x7fffffffe368)
at dl-init.c:64
#7 0x00007ffff7deaaa3 in call_init (env=0x7fffffffe368, argv=0x7fffffffe358,
argc=1, l=<optimized out>) at dl-init.c:36
#8 _dl_init (main_map=0x7ffff7ffe1a8, argc=1, argv=0x7fffffffe358,
env=0x7fffffffe368) at dl-init.c:126
#9 0x00007ffff7ddd1ca in _dl_start_user () from /lib64/ld-linux-x86-64.so.2
#10 0x0000000000000001 in ?? ()
#11 0x00007fffffffe5f1 in ?? ()
#12 0x0000000000000000 in ?? ()
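For reference, a minimal alternative sketch of the same conversion using the vector's contiguous storage, assuming the bytes are plain text. Note that the 0x00 pushed above is copied into the string as well, so s.size() is 4.

Code:
#include <vector>
#include <string>
#include <iostream>

int main()
{
    std::vector<unsigned char> v{0x46, 0x4F, 0x4F, 0x00}; // "FOO" plus a trailing null byte

    // Construct directly from the contiguous buffer; the cast is fine because
    // unsigned char and char have the same size and representation.
    std::string s(reinterpret_cast<const char*>(v.data()), v.size());

    std::cout << s.size() << std::endl; // prints 4: the embedded 0x00 is part of the string
    std::cout << s << std::endl;
    return 0;
}

Both forms should behave the same here, so the conversion itself is probably not the culprit; judging by the backtrace, the crash happens during libGL's initialization (dlsym/dlerror in _dl_init), before main even runs.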