#!/usr/bin/env python
# -*- coding: utf-8 -*-
# filename: client.py
'''
Client module for Fastdfs 3.08
author: scott yuan scottzer8@gmail.com
date: 2012-06-21
'''
import os
import sys

from fdfs_client.utils import *
from fdfs_client.tracker_client import *
from fdfs_client.storage_client import *
from fdfs_client.exceptions import *

def get_tracker_conf(conf_path='client.conf'):
    cf = Fdfs_ConfigParser()
    tracker = {}
    try:
        cf.read(conf_path)
        timeout = cf.getint('__config__', 'connect_timeout')
        tracker_list = cf.get('__config__', 'tracker_server')
        if isinstance(tracker_list, str):
            tracker_list = [tracker_list]
        tracker_ip_list = []
        for tr in tracker_list:
            tracker_ip, tracker_port = tr.split(':')
            tracker_ip_list.append((tracker_ip, tracker_port))
        tracker['host_tuple'] = tuple(tracker_ip_list)
        tracker['timeout'] = timeout
        tracker['name'] = 'Tracker Pool'
    except:
        raise
    return tracker

class Fdfs_client(object):
    '''
    Class Fdfs_client implements the Fastdfs client protocol, version 3.08.
    It can upload, download and delete files to or from an fdfs server, and it
    uses a connection pool to manage connections to the server.
    '''

    def __init__(self, conf_path='/etc/fdfs/client.conf',
                 poolclass=ConnectionPool):
        self.trackers = get_tracker_conf(conf_path)
        self.tracker_pool = poolclass(**self.trackers)
        self.timeout = self.trackers['timeout']
        self.storages = {}

    def __del__(self):
        try:
            # The pool is created as self.tracker_pool in __init__
            # (the original referenced a non-existent self.pool here).
            self.tracker_pool.destroy()
            self.tracker_pool = None
        except:
            pass

    def get_storage(self, store_serv):
        '''Return a cached Storage_client for the given storage server,
        creating and caching it on first use.'''
        store = self.storages.get((store_serv.ip_addr, store_serv.port), None)
        if store is None:
            store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
            self.storages[(store_serv.ip_addr, store_serv.port)] = store
        return store

    def get_store_serv(self, remote_file_id):
        '''
        Get store server info by remote_file_id.
        @author: LeoTse
        @param remote_file_id: string, file_id of file that is on storage server
        @return Storage_server object
        '''
        tmp = split_remote_fileid(remote_file_id)
        if not tmp:
            raise DataError('[-] Error: remote_file_id is invalid.(in get_store_serv)')
        group_name, remote_filename = tmp
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_update(group_name, remote_filename)
        return store_serv

    def upload_by_filename(self, filename, meta_dict=None):
        '''
        Upload a file to Storage server.
        arguments:
        @filename: string, name of file that will be uploaded
        @meta_dict: dictionary e.g.:{
            'ext_name' : 'jpg',
            'file_size' : '10240B',
            'width' : '160px',
            'height' : '80px'
        } meta_dict can be null
        @return dict {
            'Group name' : group_name,
            'Remote file_id' : remote_file_id,
            'Status' : 'Upload successed.',
            'Local file name' : local_file_name,
            'Uploaded size' : upload_size,
            'Storage IP' : storage_ip
        } if success else None
        '''
        isfile, errmsg = fdfs_check_file(filename)
        if not isfile:
            raise DataError(errmsg + '(uploading)')
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_stor_without_group()
        return self.get_storage(store_serv).storage_upload_by_filename(tc, store_serv, filename, meta_dict)

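    # Usage sketch (illustrative, not part of the module): assumes a tracker
    # reachable through /etc/fdfs/client.conf and a local file '/tmp/demo.jpg'
    # (both hypothetical).
    #
    #   client = Fdfs_client('/etc/fdfs/client.conf')
    #   ret = client.upload_by_filename('/tmp/demo.jpg', {'ext_name': 'jpg'})
    #   remote_file_id = ret['Remote file_id']   # e.g. 'group1/M00/...'
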
    def upload_by_file(self, filename, meta_dict=None):
        '''Upload a local file to a Storage server; arguments and return value
        are the same as for upload_by_filename.'''
        isfile, errmsg = fdfs_check_file(filename)
        if not isfile:
            raise DataError(errmsg + '(uploading)')
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_stor_without_group()
        return self.get_storage(store_serv).storage_upload_by_file(tc, store_serv, filename, meta_dict)

    def upload_by_buffer(self, filebuffer, file_ext_name=None, meta_dict=None):
        '''
        Upload a buffer to Storage server.
        arguments:
        @filebuffer: string, buffer
        @file_ext_name: string, file extension name
        @meta_dict: dictionary e.g.:{
            'ext_name' : 'jpg',
            'file_size' : '10240B',
            'width' : '160px',
            'height' : '80px'
        }
        @return dict {
            'Group name' : group_name,
            'Remote file_id' : remote_file_id,
            'Status' : 'Upload successed.',
            'Local file name' : '',
            'Uploaded size' : upload_size,
            'Storage IP' : storage_ip
        } if success else None
        '''
        if not filebuffer:
            raise DataError('[-] Error: argument filebuffer can not be null.')
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_stor_without_group()
        return self.get_storage(store_serv).storage_upload_by_buffer(tc, store_serv, filebuffer,
                                                                     file_ext_name, meta_dict)

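    # Usage sketch (illustrative): upload raw bytes already held in memory;
    # the extension is passed explicitly because there is no filename to
    # derive it from. Assumes 'client' is an Fdfs_client instance (see the
    # sketch after upload_by_filename).
    #
    #   ret = client.upload_by_buffer(open('/tmp/demo.jpg', 'rb').read(),
    #                                 file_ext_name='jpg')
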
    def upload_slave_by_filename(self, filename, remote_file_id, prefix_name,
                                 meta_dict=None):
        '''
        Upload slave file to Storage server.
        arguments:
        @filename: string, local file name
        @remote_file_id: string, remote file id of the master file
        @prefix_name: string
        @meta_dict: dictionary e.g.:{
            'ext_name' : 'jpg',
            'file_size' : '10240B',
            'width' : '160px',
            'height' : '80px'
        }
        @return dictionary {
            'Status' : 'Upload slave successed.',
            'Local file name' : local_filename,
            'Uploaded size' : upload_size,
            'Remote file id' : remote_file_id,
            'Storage IP' : storage_ip
        }
        '''
        isfile, errmsg = fdfs_check_file(filename)
        if not isfile:
            raise DataError(errmsg + '(uploading slave)')
        tmp = split_remote_fileid(remote_file_id)
        if not tmp:
            raise DataError('[-] Error: remote_file_id is invalid.(uploading slave)')
        if not prefix_name:
            raise DataError('[-] Error: prefix_name can not be null.')
        group_name, remote_filename = tmp
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_stor_with_group(group_name)
        store = self.get_storage(store_serv)
        try:
            # Pass the caller's meta_dict through (the original hard-coded None here).
            ret_dict = store.storage_upload_slave_by_filename(tc, store_serv, filename,
                                                              prefix_name, remote_filename,
                                                              meta_dict=meta_dict)
        except:
            raise
        ret_dict['Status'] = 'Upload slave file successed.'
        return ret_dict

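    # Usage sketch (illustrative): a slave file is stored alongside an existing
    # master file; 'master_id' is a remote file_id returned by one of the
    # upload_* methods and 'thumb' is an arbitrary prefix (both hypothetical).
    #
    #   ret = client.upload_slave_by_filename('/tmp/demo_small.jpg',
    #                                         master_id, 'thumb')
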
    def upload_slave_by_file(self, filename, remote_file_id, prefix_name,
                             meta_dict=None):
        '''
        Upload slave file to Storage server.
        arguments:
        @filename: string, local file name
        @remote_file_id: string, remote file id of the master file
        @prefix_name: string
        @meta_dict: dictionary e.g.:{
            'ext_name' : 'jpg',
            'file_size' : '10240B',
            'width' : '160px',
            'height' : '80px'
        }
        @return dictionary {
            'Status' : 'Upload slave successed.',
            'Local file name' : local_filename,
            'Uploaded size' : upload_size,
            'Remote file id' : remote_file_id,
            'Storage IP' : storage_ip
        }
        '''
        isfile, errmsg = fdfs_check_file(filename)
        if not isfile:
            raise DataError(errmsg + '(uploading slave)')
        tmp = split_remote_fileid(remote_file_id)
        if not tmp:
            raise DataError('[-] Error: remote_file_id is invalid.(uploading slave)')
        if not prefix_name:
            raise DataError('[-] Error: prefix_name can not be null.')
        group_name, remote_filename = tmp
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_stor_with_group(group_name)
        store = self.get_storage(store_serv)
        try:
            # Pass the caller's meta_dict through (the original hard-coded None here).
            ret_dict = store.storage_upload_slave_by_file(tc, store_serv, filename,
                                                          prefix_name, remote_filename,
                                                          meta_dict=meta_dict)
        except:
            raise
        ret_dict['Status'] = 'Upload slave file successed.'
        return ret_dict

    def upload_slave_by_buffer(self, filebuffer, remote_file_id,
                               meta_dict=None, file_ext_name=None):
        '''
        Upload slave file by buffer.
        arguments:
        @filebuffer: string
        @remote_file_id: string
        @meta_dict: dictionary e.g.:{
            'ext_name' : 'jpg',
            'file_size' : '10240B',
            'width' : '160px',
            'height' : '80px'
        }
        @return dictionary {
            'Status' : 'Upload slave successed.',
            'Local file name' : local_filename,
            'Uploaded size' : upload_size,
            'Remote file id' : remote_file_id,
            'Storage IP' : storage_ip
        }
        '''
        if not filebuffer:
            raise DataError('[-] Error: argument filebuffer can not be null.')
        tmp = split_remote_fileid(remote_file_id)
        if not tmp:
            raise DataError('[-] Error: remote_file_id is invalid.(uploading slave)')
        group_name, remote_filename = tmp
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_update(group_name, remote_filename)
        store = self.get_storage(store_serv)
        return store.storage_upload_slave_by_buffer(tc, store_serv, filebuffer,
                                                    remote_filename, meta_dict,
                                                    file_ext_name)

    def upload_appender_by_filename(self, local_filename, meta_dict=None):
        '''
        Upload an appender file by filename.
        arguments:
        @local_filename: string
        @meta_dict: dictionary e.g.:{
            'ext_name' : 'jpg',
            'file_size' : '10240B',
            'width' : '160px',
            'height' : '80px'
        } Notice: it can be null
        @return dict {
            'Group name' : group_name,
            'Remote file_id' : remote_file_id,
            'Status' : 'Upload successed.',
            'Local file name' : '',
            'Uploaded size' : upload_size,
            'Storage IP' : storage_ip
        } if success else None
        '''
        isfile, errmsg = fdfs_check_file(local_filename)
        if not isfile:
            raise DataError(errmsg + '(uploading appender)')
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_stor_without_group()
        store = self.get_storage(store_serv)
        return store.storage_upload_appender_by_filename(tc, store_serv,
                                                         local_filename, meta_dict)

    def upload_appender_by_file(self, local_filename, meta_dict=None):
        '''
        Upload an appender file by file.
        arguments:
        @local_filename: string
        @meta_dict: dictionary e.g.:{
            'ext_name' : 'jpg',
            'file_size' : '10240B',
            'width' : '160px',
            'height' : '80px'
        } Notice: it can be null
        @return dict {
            'Group name' : group_name,
            'Remote file_id' : remote_file_id,
            'Status' : 'Upload successed.',
            'Local file name' : '',
            'Uploaded size' : upload_size,
            'Storage IP' : storage_ip
        } if success else None
        '''
        isfile, errmsg = fdfs_check_file(local_filename)
        if not isfile:
            raise DataError(errmsg + '(uploading appender)')
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_stor_without_group()
        store = self.get_storage(store_serv)
        return store.storage_upload_appender_by_file(tc, store_serv,
                                                     local_filename, meta_dict)

    def upload_appender_by_buffer(self, filebuffer, file_ext_name=None, meta_dict=None):
        '''
        Upload an appender file by buffer to Storage server.
        arguments:
        @filebuffer: string
        @file_ext_name: string, can be null
        @meta_dict: dictionary, can be null
        @return dict {
            'Group name' : group_name,
            'Remote file_id' : remote_file_id,
            'Status' : 'Upload successed.',
            'Local file name' : '',
            'Uploaded size' : upload_size,
            'Storage IP' : storage_ip
        } if success else None
        '''
        if not filebuffer:
            raise DataError('[-] Error: argument filebuffer can not be null.')
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_stor_without_group()
        store = self.get_storage(store_serv)
        return store.storage_upload_appender_by_buffer(tc, store_serv,
                                                       filebuffer, meta_dict,
                                                       file_ext_name)

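    # Usage sketch (illustrative): an appender file is created once and can
    # later be grown with append_by_*, shrunk with truncate_file, or patched
    # with modify_by_* using its remote file_id.
    #
    #   ret = client.upload_appender_by_buffer('first chunk', file_ext_name='log')
    #   appender_id = ret['Remote file_id']
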
    def delete_file(self, remote_file_id):
        '''
        Delete a file from Storage server.
        arguments:
        @remote_file_id: string, file_id of file that is on storage server
        @return tuple ('Delete file successed.', remote_file_id, storage_ip)
        '''
        tmp = split_remote_fileid(remote_file_id)
        if not tmp:
            raise DataError('[-] Error: remote_file_id is invalid.(in delete file)')
        group_name, remote_filename = tmp
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_update(group_name, remote_filename)
        store = self.get_storage(store_serv)
        return store.storage_delete_file(tc, store_serv, remote_filename)

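    # Usage sketch (illustrative): remove a previously uploaded file by its
    # remote file_id (as returned by the upload_* methods).
    #
    #   client.delete_file(remote_file_id)
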
    def download_to_file(self, local_filename, remote_file_id, offset=0, down_bytes=0):
        '''
        Download a file from Storage server.
        arguments:
        @local_filename: string, local name of file
        @remote_file_id: string, file_id of file that is on storage server
        @offset: long
        @down_bytes: long
        @return dict {
            'Remote file_id' : remote_file_id,
            'Content' : local_filename,
            'Download size' : downloaded_size,
            'Storage IP' : storage_ip
        }
        '''
        tmp = split_remote_fileid(remote_file_id)
        if not tmp:
            raise DataError('[-] Error: remote_file_id is invalid.(in download file)')
        group_name, remote_filename = tmp
        # Convert unconditionally; the original inverted guards left these names
        # unassigned whenever a non-zero offset or byte count was passed.
        file_offset = long(offset)
        download_bytes = long(down_bytes)
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_fetch(group_name, remote_filename)
        store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
        return store.storage_download_to_file(tc, store_serv, local_filename,
                                              file_offset, download_bytes,
                                              remote_filename)

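    # Usage sketch (illustrative): fetch a stored file into a local path;
    # offset and down_bytes default to 0, i.e. the whole file is downloaded.
    #
    #   client.download_to_file('/tmp/copy.jpg', remote_file_id)
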
    def download_to_buffer(self, remote_file_id, offset=0, down_bytes=0):
        '''
        Download a file from Storage server and store it in a buffer.
        arguments:
        @remote_file_id: string, file_id of file that is on storage server
        @offset: long
        @down_bytes: long
        @return dict {
            'Remote file_id' : remote_file_id,
            'Content' : file_buffer,
            'Download size' : downloaded_size,
            'Storage IP' : storage_ip
        }
        '''
        tmp = split_remote_fileid(remote_file_id)
        if not tmp:
            raise DataError('[-] Error: remote_file_id is invalid.(in download file)')
        group_name, remote_filename = tmp
        # Convert unconditionally; the original inverted guards left these names
        # unassigned whenever a non-zero offset or byte count was passed.
        file_offset = long(offset)
        download_bytes = long(down_bytes)
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_fetch(group_name, remote_filename)
        store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
        file_buffer = None
        return store.storage_download_to_buffer(tc, store_serv, file_buffer,
                                                file_offset, download_bytes,
                                                remote_filename)

    def list_one_group(self, group_name):
        '''
        List information about one group.
        arguments:
        @group_name: string, name of the group to list
        @return Group_info, instance
        '''
        tc = Tracker_client(self.tracker_pool)
        return tc.tracker_list_one_group(group_name)

    def list_servers(self, group_name, storage_ip=None):
        '''
        List information about all storage servers in a group.
        arguments:
        @group_name: string
        @storage_ip: string, optional storage server IP to limit the query to
        @return dictionary {
            'Group name' : group_name,
            'Servers' : server list,
        }
        '''
        tc = Tracker_client(self.tracker_pool)
        return tc.tracker_list_servers(group_name, storage_ip)

    def list_all_groups(self):
        '''
        List information about all groups.
        @return dictionary {
            'Groups count' : group_count,
            'Groups' : list of groups
        }
        '''
        tc = Tracker_client(self.tracker_pool)
        return tc.tracker_list_all_groups()

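    # Usage sketch (illustrative): the list_* methods are read-only monitor
    # queries against the tracker; 'group1' is a hypothetical group name.
    #
    #   print client.list_all_groups()
    #   print client.list_one_group('group1')
    #   print client.list_servers('group1')
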
    def get_meta_data(self, remote_file_id):
        '''
        Get meta data of remote file.
        arguments:
        @remote_file_id: string, remote file id
        @return dictionary, meta data
        '''
        tmp = split_remote_fileid(remote_file_id)
        if not tmp:
            raise DataError('[-] Error: remote_file_id is invalid.(in get meta data)')
        group_name, remote_filename = tmp
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_update(group_name, remote_filename)
        store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
        return store.storage_get_metadata(tc, store_serv, remote_filename)

    def set_meta_data(self, remote_file_id,
                      meta_dict, op_flag=STORAGE_SET_METADATA_FLAG_OVERWRITE):
        '''
        Set meta data of remote file.
        arguments:
        @remote_file_id: string
        @meta_dict: dictionary
        @op_flag: char, 'O' for overwrite, 'M' for merge
        @return dictionary {
            'Status' : status,
            'Storage IP' : storage_ip
        }
        '''
        tmp = split_remote_fileid(remote_file_id)
        if not tmp:
            raise DataError('[-] Error: remote_file_id is invalid.(in set meta data)')
        group_name, remote_filename = tmp
        tc = Tracker_client(self.tracker_pool)
        try:
            store_serv = tc.tracker_query_storage_update(group_name, remote_filename)
            store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
            status = store.storage_set_metadata(tc, store_serv,
                                                remote_filename, meta_dict)
        except (ConnectionError, ResponseError, DataError):
            raise
        #if status == 2:
        #    raise DataError('[-] Error: remote file %s is not exist.' % remote_file_id)
        if status != 0:
            # Report the storage status code (the original referenced an undefined 'th').
            raise DataError('[-] Error: %d, %s' % (status, os.strerror(status)))
        ret_dict = {}
        ret_dict['Status'] = 'Set meta data success.'
        ret_dict['Storage IP'] = store_serv.ip_addr
        return ret_dict

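    # Usage sketch (illustrative): read metadata back, then merge in a new key
    # ('M' merges with existing metadata, 'O' overwrites the whole set).
    #
    #   meta = client.get_meta_data(remote_file_id)
    #   client.set_meta_data(remote_file_id, {'width': '320px'}, op_flag='M')
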
    def append_by_filename(self, local_filename, remote_fileid):
        isfile, errmsg = fdfs_check_file(local_filename)
        if not isfile:
            raise DataError(errmsg + '(append)')
        tmp = split_remote_fileid(remote_fileid)
        if not tmp:
            raise DataError('[-] Error: remote_file_id is invalid.(append)')
        group_name, appended_filename = tmp
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_update(group_name, appended_filename)
        store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
        return store.storage_append_by_filename(tc, store_serv, local_filename,
                                                appended_filename)

    def append_by_file(self, local_filename, remote_fileid):
        isfile, errmsg = fdfs_check_file(local_filename)
        if not isfile:
            raise DataError(errmsg + '(append)')
        tmp = split_remote_fileid(remote_fileid)
        if not tmp:
            raise DataError('[-] Error: remote_file_id is invalid.(append)')
        group_name, appended_filename = tmp
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_update(group_name, appended_filename)
        store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
        return store.storage_append_by_file(tc, store_serv, local_filename,
                                            appended_filename)

    def append_by_buffer(self, file_buffer, remote_fileid):
        if not file_buffer:
            raise DataError('[-] Error: file_buffer can not be null.')
        tmp = split_remote_fileid(remote_fileid)
        if not tmp:
            raise DataError('[-] Error: remote_file_id is invalid.(append)')
        group_name, appended_filename = tmp
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_update(group_name, appended_filename)
        store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
        return store.storage_append_by_buffer(tc, store_serv, file_buffer,
                                              appended_filename)

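    # Usage sketch (illustrative): append more bytes to an appender file
    # created with one of the upload_appender_* methods; 'appender_id' is the
    # remote file_id from that earlier call.
    #
    #   client.append_by_buffer('second chunk', appender_id)
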
    def truncate_file(self, truncated_filesize, appender_fileid):
        '''
        Truncate file in Storage server.
        arguments:
        @truncated_filesize: long
        @appender_fileid: remote_fileid
        @return: dictionary {
            'Status' : 'Truncate successed.',
            'Storage IP' : storage_ip
        }
        '''
        trunc_filesize = long(truncated_filesize)
        tmp = split_remote_fileid(appender_fileid)
        if not tmp:
            raise DataError('[-] Error: appender_fileid is invalid.(truncate)')
        group_name, appender_filename = tmp
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_update(group_name, appender_filename)
        store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
        return store.storage_truncate_file(tc, store_serv, trunc_filesize,
                                           appender_filename)

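    # Usage sketch (illustrative): cut an appender file back to a given size
    # (here 1 KB; a size of 0 would empty the file).
    #
    #   client.truncate_file(1024, appender_id)
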
    def modify_by_filename(self, filename, appender_fileid, offset=0):
        '''
        Modify a file in Storage server by filename.
        arguments:
        @filename: string, local file name
        @offset: long, file offset
        @appender_fileid: string, remote file id
        @return: dictionary {
            'Status' : 'Modify successed.',
            'Storage IP' : storage_ip
        }
        '''
        isfile, errmsg = fdfs_check_file(filename)
        if not isfile:
            raise DataError(errmsg + '(modify)')
        filesize = os.stat(filename).st_size
        tmp = split_remote_fileid(appender_fileid)
        if not tmp:
            raise DataError('[-] Error: remote_fileid is invalid.(modify)')
        group_name, appender_filename = tmp
        # Convert unconditionally; the original inverted guard silently dropped
        # any non-zero offset.
        file_offset = long(offset)
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_update(group_name, appender_filename)
        store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
        return store.storage_modify_by_filename(tc, store_serv, filename, file_offset,
                                                filesize, appender_filename)

    def modify_by_file(self, filename, appender_fileid, offset=0):
        '''
        Modify a file in Storage server by file.
        arguments:
        @filename: string, local file name
        @offset: long, file offset
        @appender_fileid: string, remote file id
        @return: dictionary {
            'Status' : 'Modify successed.',
            'Storage IP' : storage_ip
        }
        '''
        isfile, errmsg = fdfs_check_file(filename)
        if not isfile:
            raise DataError(errmsg + '(modify)')
        filesize = os.stat(filename).st_size
        tmp = split_remote_fileid(appender_fileid)
        if not tmp:
            raise DataError('[-] Error: remote_fileid is invalid.(modify)')
        group_name, appender_filename = tmp
        # Convert unconditionally; the original inverted guard silently dropped
        # any non-zero offset.
        file_offset = long(offset)
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_update(group_name, appender_filename)
        store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
        return store.storage_modify_by_file(tc, store_serv, filename, file_offset,
                                            filesize, appender_filename)

    def modify_by_buffer(self, filebuffer, appender_fileid, offset=0):
        '''
        Modify a file in Storage server by buffer.
        arguments:
        @filebuffer: string, file buffer
        @offset: long, file offset
        @appender_fileid: string, remote file id
        @return: dictionary {
            'Status' : 'Modify successed.',
            'Storage IP' : storage_ip
        }
        '''
        if not filebuffer:
            raise DataError('[-] Error: filebuffer can not be null.(modify)')
        filesize = len(filebuffer)
        tmp = split_remote_fileid(appender_fileid)
        if not tmp:
            raise DataError('[-] Error: remote_fileid is invalid.(modify)')
        group_name, appender_filename = tmp
        # Convert unconditionally; the original inverted guard silently dropped
        # any non-zero offset.
        file_offset = long(offset)
        tc = Tracker_client(self.tracker_pool)
        store_serv = tc.tracker_query_storage_update(group_name, appender_filename)
        store = Storage_client(store_serv.ip_addr, store_serv.port, self.timeout)
        return store.storage_modify_by_buffer(tc, store_serv, filebuffer, file_offset,
                                              filesize, appender_filename)

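    # Usage sketch (illustrative): overwrite part of an appender file in place,
    # starting at byte offset 0; 'appender_id' is the remote file_id of an
    # appender file created earlier.
    #
    #   client.modify_by_buffer('patched bytes', appender_id, offset=0)
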