
cocos2d-x multithreading and thread synchronization, plus cocos2d-x memory management under multithreading


Internally, the cocos2d-x engine runs one big main loop that updates the screen every frame. If a time-consuming operation is placed on the main thread, the game stutters, which is unacceptable: smoothness is the most basic requirement of a game, and it is one of the reasons game development favours C++. Besides, dual-core and quad-core phones are now common, so it is time to use multithreading to exploit the hardware.

1. Setting up the environment

Multithreading in cocos2d-x can be made cross-platform with pthread, and it is not hard to understand. The project has to be configured first: right-click the project -> Properties -> Configuration Properties -> Linker -> Input -> Additional Dependencies and add pthreadVCE2.lib (screenshot not shown).

Then add the include path: right-click the project -> Properties -> C/C++ -> General -> Additional Include Directories and add the directory that contains the pthread headers (screenshot not shown).

With that, the environment is set up.
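Once the library and the include path are configured, the header can be pulled in wherever threads are needed. As a small sanity check (the #pragma line is an MSVC alternative I am assuming works with the import library; it is not something the article shows):

// Link against the pthreads-win32 import library and pull in the API.
#pragma comment(lib, "pthreadVCE2.lib")   // equivalent to the Additional Dependencies setting
#include "pthread.h"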

2. Using multiple threads

The most important function when implementing multithreading with pthread is:

PTW32_DLLPORT int PTW32_CDECL pthread_create(pthread_t *tid,              // thread identifier
                                             const pthread_attr_t *attr,  // thread creation attributes
                                             void *(*start)(void *),      // pointer to the entry function
                                             void *arg);                  // data passed to the thread

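As a standalone illustration of how this signature is used (not taken from the article; worker and data are illustrative names), creating a thread and waiting for it looks like this:

#include <cstdio>
#include "pthread.h"

static void* worker(void* arg)
{
    int value = *(int*)arg;                 // recover the data passed through pthread_create
    printf("worker received %d\n", value);
    return NULL;
}

int main()
{
    pthread_t tid;
    int data = 42;
    if (pthread_create(&tid, NULL, worker, &data) != 0)
        return 1;                           // thread creation failed
    pthread_join(tid, NULL);                // block until the worker returns
    return 0;
}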
In HelloWorldScene.h:

pthread_t pidrun, pidgo;
static void* th_run(void* r);
static void* th_go(void* r);

This declares the two thread functions and the two thread identifiers.
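The entry functions are declared static because pthread_create expects a plain void* (*)(void*) with no hidden this pointer. If the thread needs access to the scene, one option (a hypothetical variant, not what the article does) is to pass this as the argument:

// Hypothetical: hand the scene itself to the thread entry.
pthread_create(&pidrun, NULL, th_run, this);

void* HelloWorld::th_run(void* r)
{
    HelloWorld* scene = static_cast<HelloWorld*>(r);
    // ... read data members of scene here, but do not touch UI from this thread ...
    return NULL;
}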

Next, a small class is defined for passing data to the thread. The student class looks like this:

#pragma once
#include <string>

class student
{
public:
    student(void);
    student(std::string name, int age, std::string sex);
    ~student(void);

    std::string name;
    int age;
    std::string sex;
};

The source file:

#include "student.h"
#include "cocos2d.h"

student::student(void)
{
}

student::~student(void)
{
    cocos2d::CCLog("delete data");
}

student::student(std::string name, int age, std::string sex)
{
    this->name = name;
    this->age = age;
    this->sex = sex;
}
Both threads are started in the callback of the close menu item:

void HelloWorld::menuCloseCallback(CCObject* pSender)
{
    student* temp = new student(std::string("zhycheng"), 23, std::string("male"));
    pthread_mutex_init(&mutex, NULL);
    pthread_create(&pidrun, NULL, th_run, temp);   // start the first thread
    pthread_create(&pidgo, NULL, th_go, 0);        // start the second thread
}

As you can see, the student pointer is handed to the pidrun thread, which recovers the student data like this:

student* s = (student*)(r);
CCLog("name is %s, and age is %d, sex is %s", s->name.c_str(), s->age, s->sex.c_str());
delete s;

3. Thread synchronization

Once you use threads you have to think about synchronization. When different threads access the same resource at the same time, the order of access is unpredictable, and so are the results.

Here pthread_mutex_t is used for synchronization, demonstrated with a ticket-selling system: several windows sell tickets at the same time, and no ticket may be sold twice, nor may a ticket be left unsold while it is still available.

Tickets are sold in the thread functions th_run and th_go. The number of tickets is a global variable that is decremented for every ticket sold, and the pthread_mutex_t used for synchronization is a global variable as well.
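The listings below use this global counter and mutex but never define them; a plausible definition (the names match the code that follows) would be:

// Assumed global definitions; the article uses these names but never shows them.
int ticket = 100;        // tickets left to sell (the log below starts at 100)
pthread_mutex_t mutex;   // protects ticket; initialized with pthread_mutex_init in menuCloseCallback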

void* HelloWorld::th_run(void* r)
{
    student* s = (student*)(r);
    CCLog("name is %s, and age is %d, sex is %s", s->name.c_str(), s->age, s->sex.c_str());
    delete s;
    while (true)
    {
        pthread_mutex_lock(&mutex);
        if (ticket > 0)
        {
            CCLog("thread run sell %d", ticket);
            ticket--;
            pthread_mutex_unlock(&mutex);
        }
        else
        {
            pthread_mutex_unlock(&mutex);   // release the lock before leaving the loop
            break;
        }
        Sleep(1);
        //usleep(10);
    }
    return NULL;
}

void* HelloWorld::th_go(void* r)
{
    while (true)
    {
        pthread_mutex_lock(&mutex);
        if (ticket > 0)
        {
            CCLog("thread go sell %d", ticket);
            ticket--;
            pthread_mutex_unlock(&mutex);
        }
        else
        {
            pthread_mutex_unlock(&mutex);   // release the lock before leaving the loop
            break;
        }
    }
    return NULL;
}


Once a thread has locked the mutex, any other thread that tries to lock it must wait until the owner releases it. Sleep() puts the calling thread to sleep; its unit is milliseconds. The ticket-selling output looks like this:

name is zhycheng, and age is 23, sex is male
delete data
thread run sell 100
thread run sell 99
thread go sell 98
thread go sell 97
thread run sell 96
thread go sell 95
thread go sell 94
thread run sell 93
thread go sell 92
thread run sell 91
thread go sell 90
thread go sell 89
thread run sell 88
thread go sell 87
thread run sell 86
thread go sell 85
thread run sell 84
thread go sell 83
thread run sell 82
thread go sell 81
thread run sell 80
thread go sell 79
thread run sell 78
thread go sell 77
thread run sell 76
thread go sell 75
thread run sell 74
thread go sell 73
thread run sell 72
thread go sell 71
thread run sell 70
thread go sell 69
thread go sell 68
thread run sell 67
thread go sell 66
thread run sell 65
thread go sell 64
thread run sell 63
thread go sell 62
thread run sell 61
thread go sell 60
thread run sell 59
thread go sell 58
thread run sell 57
thread go sell 56
thread run sell 55
thread go sell 54
thread run sell 53
thread run sell 52
thread go sell 51
thread run sell 50
thread go sell 49
thread run sell 48
thread go sell 47
thread run sell 46
thread go sell 45
thread run sell 44
thread run sell 43
thread go sell 42
thread run sell 41
thread run sell 40
thread go sell 39
thread run sell 38
thread run sell 37
thread run sell 36
thread run sell 35
thread go sell 34
thread run sell 33
thread run sell 32
thread go sell 31
thread run sell 30
thread run sell 29
thread run sell 28
thread run sell 27
thread run sell 26
thread run sell 25
thread go sell 24
thread run sell 23
thread go sell 22
thread go sell 21
thread run sell 20
thread go sell 19
thread run sell 18
thread run sell 17
thread go sell 16
thread run sell 15
thread go sell 14
thread go sell 13
thread run sell 12
thread go sell 11
thread go sell 10
thread run sell 9
thread go sell 8
thread run sell 7
thread go sell 6
thread go sell 5
thread run sell 4
thread go sell 3
thread run sell 2
thread run sell 1


As you can see, the output is correct. What happens without the mutex? With the synchronization commented out, the output becomes:

thread run sell 98
thread run sell 94
thread go sell 94
thread run sell 92
thread run sell 89
thread go sell 88
thread run sell 87
thread go sell 86
thread go sell 84
thread run sell 83
thread go sell 82
thread run sell 81
thread go sell 80
thread run sell 79
thread run sell 75
thread go sell 74
thread run sell 73
thread go sell 72
thread run sell 71
thread go sell 70
thread run sell 68
thread go sell 67
thread go sell 63
thread run sell 62
thread go sell 61
thread run sell 60
thread run sell 58
thread run sell 56
thread run sell 54
thread go sell 52
thread run sell 52
thread go sell 50
thread run sell 50
thread go sell 49
thread run sell 47
thread go sell 47
thread run sell 45
thread run sell 43 thread go sell 43
thread go sell 41
thread run sell 39
thread go sell 37
thread go sell 35
thread run sell 35
thread go sell 33 thread run sell 33
thread go sell 31 thread run sell 31
thread go sell 29
thread run sell 29
thread go sell 27
thread run sell 27
thread go sell 25
thread run sell 25
thread go sell 23
thread run sell 21
thread go sell 21
thread run sell 19
thread go sell 17
thread go sell 15
thread run sell 15
thread run sell 13
thread go sell 13
thread run sell 11 thread go sell 11
thread go sell 9
thread run sell 9
thread go sell 7
thread go sell 5 thread run sell 5
thread run sell 3
thread go sell 1
thread run sell 1


As you can see, some tickets were sold twice and others were never sold at all.

4. Notes

1. Sleep() puts a thread to sleep, but it is not cross-platform: it only exists on Windows. On other platforms use usleep instead (see the portable wrapper sketched after this list).

2. Do not use cocos2d-x's memory-management calls CCObject::retain(), CCObject::release() or CCObject::autorelease() outside the main thread, because CCAutoreleasePool is not thread-safe. The OpenGL context is not thread-safe either, so do not call cocos2d-x APIs or do UI work from a non-main thread (a main-thread hand-off sketch follows after this list).
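For the first note, a small wrapper (a sketch, not from the article) keeps the call sites portable:

// Portable millisecond sleep; assumes _WIN32 is defined on Windows builds.
#ifdef _WIN32
#include <windows.h>
static void sleep_ms(unsigned int ms) { Sleep(ms); }
#else
#include <unistd.h>
static void sleep_ms(unsigned int ms) { usleep(ms * 1000); }  // usleep takes microseconds
#endif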
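For the second note, one common way to respect the rule is to have the worker thread produce only plain data and let the main thread touch cocos2d-x objects. A minimal hand-off sketch (assuming HelloWorld overrides update() and calls scheduleUpdate() in init(); resultQueue and queueMutex are illustrative names, not from the article):

#include <queue>
#include <string>
#include "pthread.h"

static std::queue<std::string> resultQueue;                   // filled by the worker thread
static pthread_mutex_t queueMutex = PTHREAD_MUTEX_INITIALIZER;

void* HelloWorld::th_run(void* r)   // variant of th_run for illustration
{
    // ... heavy work off the main thread, no cocos2d-x calls here ...
    pthread_mutex_lock(&queueMutex);
    resultQueue.push("work finished");                        // only plain data crosses threads
    pthread_mutex_unlock(&queueMutex);
    return NULL;
}

void HelloWorld::update(float dt)   // runs on the main thread every frame
{
    pthread_mutex_lock(&queueMutex);
    while (!resultQueue.empty())
    {
        std::string msg = resultQueue.front();
        resultQueue.pop();
        // Safe to create labels, sprites and autoreleased objects here.
        cocos2d::CCLog("%s", msg.c_str());
    }
    pthread_mutex_unlock(&queueMutex);
}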


        @H_696_2@
        @H_696_2@@H_696_2@


cocos2d-x memory management and the multithreading problem:

Cocos2d-x adopts Objective-C's memory-management scheme, which I was delighted about: as long as you stick to the Objective-C rule of "whoever creates it releases it, whoever retains it releases it", memory bugs are unlikely.

However, the game being developed needs multithreading, and during testing I kept hitting null-pointer crashes that appeared at random. After two fruitless days I began to suspect cocos2d-x's own memory management. Stepping through the code, I noticed that the variables throwing pointer exceptions were always objects that had been autoreleased and were then used again a moment later. I put a breakpoint and log output in the destructor and confirmed that the object had indeed been destroyed. That was puzzling at first: the object had only been autoreleased, not explicitly released, so who freed it?

Reading the CCAutoreleasePool source revealed the following problem with cocos2d-x's memory management under multithreading (figure omitted): thread 1 and thread 2 are independent threads whose CPU time slices interleave. Thread 1 pushes an autorelease pool at time 1 and pops it at the end of its work at time 3; thread 2 pushes a pool at time 2 and pops it at time 4. Now suppose that while thread 2 holds the CPU, an object obj is autoreleased, i.e. obj->autorelease(). What happens at time 3?

The answer is simple: obj is released at time 3, although we expect it to live until time 4. That is why, under multithreading, cocos2d-x autorelease variables blow up with seemingly random pointer exceptions.

The fix: in CCPoolManager, give each thread its own CCArray stack, keyed by the thread's pthread_t id, to manage that thread's nested autorelease pools. So on push(), a CCArray stack is looked up (or created) for the current pthread_t id and used to store that thread's nested CCAutoreleasePool objects. The source is below.



//--------------------------------------------------------------------
//
// CCPoolManager
//
//--------------------------------------------------------------------

/////【diff - begin】- by layne//////

CCPoolManager* CCPoolManager::sharedPoolManager()
{
    if (s_pPoolManager == NULL)
    {
        s_pPoolManager = new CCPoolManager();
    }
    return s_pPoolManager;
}

void CCPoolManager::purgePoolManager()
{
    CC_SAFE_DELETE(s_pPoolManager);
}

CCPoolManager::CCPoolManager()
{
    // m_pReleasePoolStack = new CCArray();
    // m_pReleasePoolStack->init();
    // m_pCurReleasePool = 0;

    m_pReleasePoolMultiStack = new CCDictionary();
}

CCPoolManager::~CCPoolManager()
{
    // finalize();

    // // we only release the last autorelease pool here
    // m_pCurReleasePool = 0;
    // m_pReleasePoolStack->removeObjectAtIndex(0);
    //
    // CC_SAFE_DELETE(m_pReleasePoolStack);

    finalize();

    CC_SAFE_DELETE(m_pReleasePoolMultiStack);
}

void CCPoolManager::finalize()
{
    if (m_pReleasePoolMultiStack->count() > 0)
    {
        //CCAutoreleasePool* pReleasePool;
        CCObject* pKey = NULL;
        CCARRAY_FOREACH(m_pReleasePoolMultiStack->allKeys(), pKey)
        {
            if (!pKey)
                break;
            CCInteger* key = (CCInteger*)pKey;
            CCArray* poolStack = (CCArray*)m_pReleasePoolMultiStack->objectForKey(key->getValue());
            CCObject* pObj = NULL;
            CCARRAY_FOREACH(poolStack, pObj)
            {
                if (!pObj)
                    break;
                CCAutoreleasePool* pPool = (CCAutoreleasePool*)pObj;
                pPool->clear();
            }
        }
    }
}

void CCPoolManager::push()
{
    // CCAutoreleasePool* pPool = new CCAutoreleasePool();  //ref = 1
    // m_pCurReleasePool = pPool;
    //
    // m_pReleasePoolStack->addObject(pPool);               //ref = 2
    //
    // pPool->release();                                    //ref = 1

    pthread_mutex_lock(&m_mutex);

    CCArray* pCurReleasePoolStack = getCurReleasePoolStack();
    CCAutoreleasePool* pPool = new CCAutoreleasePool();     //ref = 1
    pCurReleasePoolStack->addObject(pPool);                 //ref = 2
    pPool->release();                                       //ref = 1

    pthread_mutex_unlock(&m_mutex);
}

void CCPoolManager::pop()
{
    // if (! m_pCurReleasePool)
    // {
    //     return;
    // }
    //
    // int nCount = m_pReleasePoolStack->count();
    //
    // m_pCurReleasePool->clear();
    //
    // if (nCount > 1)
    // {
    //     m_pReleasePoolStack->removeObjectAtIndex(nCount-1);
    //
    //     // if (nCount > 1)
    //     // {
    //     //     m_pCurReleasePool = m_pReleasePoolStack->objectAtIndex(nCount - 2);
    //     //     return;
    //     // }
    //     m_pCurReleasePool = (CCAutoreleasePool*)m_pReleasePoolStack->objectAtIndex(nCount - 2);
    // }
    //
    // /*m_pCurReleasePool = NULL;*/

    pthread_mutex_lock(&m_mutex);

    CCArray* pCurReleasePoolStack = getCurReleasePoolStack();
    CCAutoreleasePool* pCurReleasePool = getCurReleasePool();
    if (pCurReleasePoolStack && pCurReleasePool)
    {
        int nCount = pCurReleasePoolStack->count();

        pCurReleasePool->clear();

        if (nCount > 1)
        {
            pCurReleasePoolStack->removeObject(pCurReleasePool);
        }
    }

    pthread_mutex_unlock(&m_mutex);
}

void CCPoolManager::removeObject(CCObject* pObject)
{
    // CCAssert(m_pCurReleasePool, "current auto release pool should not be null");
    //
    // m_pCurReleasePool->removeObject(pObject);

    pthread_mutex_lock(&m_mutex);
    CCAutoreleasePool* pCurReleasePool = getCurReleasePool();
    CCAssert(pCurReleasePool, "current auto release pool should not be null");

    pCurReleasePool->removeObject(pObject);
    pthread_mutex_unlock(&m_mutex);
}

void CCPoolManager::addObject(CCObject* pObject)
{
    // getCurReleasePool()->addObject(pObject);

    pthread_mutex_lock(&m_mutex);
    CCAutoreleasePool* pCurReleasePool = getCurReleasePool(true);
    CCAssert(pCurReleasePool, "current auto release pool should not be null");

    pCurReleasePool->addObject(pObject);
    pthread_mutex_unlock(&m_mutex);
}

CCArray* CCPoolManager::getCurReleasePoolStack()
{
    CCArray* pPoolStack = NULL;
    pthread_t tid = pthread_self();
    if (m_pReleasePoolMultiStack->count() > 0)
    {
        pPoolStack = (CCArray*)m_pReleasePoolMultiStack->objectForKey((int)tid);
    }

    if (!pPoolStack) {
        pPoolStack = new CCArray();
        m_pReleasePoolMultiStack->setObject(pPoolStack, (int)tid);
        pPoolStack->release();
    }

    return pPoolStack;
}

CCAutoreleasePool* CCPoolManager::getCurReleasePool(bool autoCreate)
{
    // if (!m_pCurReleasePool)
    // {
    //     push();
    // }
    //
    // CCAssert(m_pCurReleasePool, "current auto release pool should not be null");
    //
    // return m_pCurReleasePool;

    CCAutoreleasePool* pReleasePool = NULL;

    CCArray* pPoolStack = getCurReleasePoolStack();
    if (pPoolStack->count() > 0)
    {
        pReleasePool = (CCAutoreleasePool*)pPoolStack->lastObject();
    }

    if (!pReleasePool && autoCreate) {
        CCAutoreleasePool* pPool = new CCAutoreleasePool();  //ref = 1
        pPoolStack->addObject(pPool);                        //ref = 2
        pPool->release();                                    //ref = 1

        pReleasePool = pPool;
    }

    return pReleasePool;
}

/////【diff - end】- by layne//////
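With this patch in place, one possible usage pattern (a sketch based only on the push() and pop() calls shown above, not something the article prescribes) is to give a worker thread its own pool scope, so that anything it autoreleases is drained at a known point:

// Sketch only: scope a per-thread autorelease pool inside a worker thread.
// This still does not make OpenGL or UI calls safe from a worker thread (see the notes in section 4).
void* HelloWorld::th_run(void* r)
{
    cocos2d::CCPoolManager::sharedPoolManager()->push();   // creates a pool on this thread's own stack

    // ... background work that autoreleases plain CCObject-derived data ...

    cocos2d::CCPoolManager::sharedPoolManager()->pop();    // clears this thread's current pool
    return NULL;
}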



Code download: https://github.com/Kaitiren/pthread-test-for-cocos2dx
