Category Archives: How to Fix

Realization of breakpoint download based on DIO in flutter

As there are not many articles about breakpoint (resumable) downloads in Flutter, and few working examples, I spent a long time searching through Dio examples before piecing this together.
Based on dio: ^4.0.0 and path_provider: ^2.0.2, this implements resumable downloading of a large file.

Core code

import 'dart:async';
import 'dart:io';
import 'dart:typed_data';
import 'package:dio/dio.dart';
import 'package:path_provider/path_provider.dart';

class DownloadFile {
  /// Maps each url currently being downloaded to its [CancelToken], both to
  /// prevent duplicate concurrent downloads and to let [cancelDownload]
  /// abort one.
  static var downloadingUrls = <String, CancelToken>{};

  /// Downloads [url] to [savePath] with HTTP Range resume support.
  ///
  /// If a partial file already exists at [savePath] the transfer resumes
  /// from its current length. [onReceiveProgress] receives
  /// (bytesReceivedSoFar, totalBytes); [done] fires when the stream
  /// completes and [failed] when a [DioError] occurs.
  static Future<void> download({
    required String url,
    required String savePath,
    ProgressCallback? onReceiveProgress,
    void Function()? done,
    void Function(DioError)? failed,
  }) async {
    // Resume point: length of any partial file already on disk.
    int downloadStart = 0;
    File f = File(savePath);
    if (await f.exists()) {
      downloadStart = f.lengthSync();
    }
    print("start: $downloadStart");
    // Fix: guard on the in-flight map alone. The original checked
    // `fileExists && downloadingUrls.containsKey(url)`, which allowed a
    // second concurrent download of the same url to start whenever no
    // partial file existed yet.
    if (downloadingUrls.containsKey(url)) {
      return;
    }
    var dio = Dio();
    CancelToken cancelToken = CancelToken();
    downloadingUrls[url] = cancelToken;
    try {
      var response = await dio.get<ResponseBody>(
        url,
        // Fix: wire the token into the request itself so cancellation also
        // aborts a transfer still in the connect/headers phase.
        cancelToken: cancelToken,
        options: Options(
          // Receive the body as a stream instead of buffering the whole
          // file in memory.
          responseType: ResponseType.stream,
          followRedirects: false,
          headers: {
            // Key to segmented downloading: ask the server to start at the
            // current file length. NOTE(review): if the server ignores the
            // Range header and replies 200 with the full body, the append
            // below duplicates data — confirm the server honours ranges.
            "range": "bytes=$downloadStart-",
          },
        ),
      );
      File file = File(savePath);
      RandomAccessFile raf = file.openSync(mode: FileMode.append);
      // Fix: close the file exactly once even if onDone/onError and the
      // whenCancel callback both run.
      bool rafClosed = false;
      Future<void> closeFile() async {
        if (!rafClosed) {
          rafClosed = true;
          await raf.close();
        }
      }

      int received = downloadStart;
      int total = await _getContentLength(response);
      Stream<Uint8List> stream = response.data!.stream;
      StreamSubscription<Uint8List>? subscription;
      subscription = stream.listen(
        (data) {
          // File writes must be synchronous so chunks land in order.
          raf.writeFromSync(data);
          received += data.length;
          onReceiveProgress?.call(received, total);
        },
        onDone: () async {
          downloadingUrls.remove(url);
          await closeFile();
          done?.call();
        },
        onError: (e) async {
          await closeFile();
          downloadingUrls.remove(url);
          failed?.call(e as DioError);
        },
        cancelOnError: true,
      );
      cancelToken.whenCancel.then((_) async {
        await subscription?.cancel();
        await closeFile();
      });
    } on DioError catch (error) {
      // Either the request was cancelled or the server answered with a
      // status code outside the 2xx range.
      if (CancelToken.isCancel(error)) {
        print("Download cancelled");
      } else {
        failed?.call(error);
      }
      downloadingUrls.remove(url);
    }
  }

  /// Reads the total file size from the Content-Range header
  /// ("bytes start-end/total"); returns 0 when it is absent or malformed.
  static Future<int> _getContentLength(Response<ResponseBody> response) async {
    try {
      var headerContent =
          response.headers.value(HttpHeaders.contentRangeHeader);
      print("download files: $headerContent");
      if (headerContent != null) {
        return int.parse(headerContent.split('/').last);
      } else {
        return 0;
      }
    } catch (e) {
      return 0;
    }
  }

  /// Cancels the in-flight download for [url], if any, and forgets it.
  static void cancelDownload(String url) {
    downloadingUrls[url]?.cancel();
    downloadingUrls.remove(url);
  }
}

Call case

/// Application entry point: mounts the demo widget tree.
void main() => runApp(TestMyApp());

/// Demo widget hosting the tap-to-download button.
class TestMyApp extends StatefulWidget {
  @override
  State<StatefulWidget> createState() => _TestMyAppState();
}

class _TestMyAppState extends State<TestMyApp> {
  /// Starts the sample download into the app documents directory.
  void download() async {
    var url = "mp4"; // TODO: replace with a real download url
    Directory dir = await getApplicationDocumentsDirectory();
    var sDCardDir = dir.path;
    var savePath = "$sDCardDir/video/1.mp4";
    // Fix: the target is a directory, so test its existence with Directory
    // (the original used File("/video"), whose exists() is false for a
    // directory) and create missing parents recursively.
    Directory videoDir = Directory("$sDCardDir/video");
    if (!await videoDir.exists()) {
      videoDir.createSync(recursive: true);
    }

    // Fix: the class defined above is DownloadFile with a static download();
    // `DownLoadManage()` does not exist in this file.
    await DownloadFile.download(
      url: url,
      savePath: savePath,
      onReceiveProgress: (received, total) {
        // total is -1 when the server did not report a size.
        if (total != -1) {
          print("Download1Received:" +
              received.toString() +
              "Total:" +
              total.toString() +
              "Progress.+${(received / total * 100).floor()}%");
        }
      },
      done: () {
        print("Download 1 completed");
      },
      failed: (e) {
        print("Download 1 failed:" + e.toString());
      },
    );
  }

  @override
  Widget build(BuildContext context) {
    return Container(
      color: Colors.white,
      child: Center(
        child: GestureDetector(
          onTap: () {
            download();
          },
          child: Container(
            width: 150,
            height: 150,
            color: Colors.red,
          ),
        ),
      ),
    );
  }
}

[Solved] Es delete all the data in the index without deleting the index structure, including curl deletion

Scenario: if you want to delete only the data under the index without deleting the index structure, there is no postman tool in the (Windows Environment) server

First, only delete all the data in the index without deleting the index structure

POST 192.168.100.88:9200/my_index/_delete_by_query


Request body (JSON):
{
  "query": {
    "match_all": {}
  }
}


Notes:
where my_index is the index name

Second, delete the specified data in the index without deleting the index structure

HEADER
DELETE 192.168.100.88:9200/log_index/log_type/D8D1D480190945C2A50B32D2255AA3D3



Notes.
where log_index is the index name, log_type is the index type, and D8D1D480190945C2A50B32D2255AA3D3 is the document id




Third: delete all data and index structure

DELETE 192.168.100.88:9200/my_index


Notes.
where my_index is the index name

Curl deletion in Windows

First, delete all data, including index structure

curl  -X DELETE "http://192.168.100.88:9200/my_index"

Second: delete all data without deleting index structure

curl  -XPOST "http://192.168.100.88:9200/log_index/_delete_by_query?pretty=true" -d "{"""query""":{"""match_all""": {}}}"

Among them: note when using curl (double quotation marks must be used in Windows Environment), single quotation mark will report the following error

“‘http” not supported or disabled in libcurl

C:\Users\admin>curl  -X DELETE 'http://192.168.100.88:9200/my_index'
curl: (1) Protocol "'http" not supported or disabled in libcurl

The problem of strncpy and the solution of GCC compile time checking

There is a problem with strcpy(), but there is still a problem with strncpy().

#define PATH_MAX 128

e.g. strncpy(file_path, src, PATH_MAX);

strncpy() is prone to two problems: 1) if the source string is too long there is no terminator, leading to out-of-bounds garbled reads; 2) the usable length easily disagrees with the nominal length — e.g. file_path nominally supports 128 characters, but once the terminator is counted only 127 are actually usable.

Strncpy source code: when SRC reaches count, there is no terminator:

/*
 * Reference strncpy: copies at most count characters from source to dest.
 * If source is shorter than count, the remainder of dest is padded with
 * NUL bytes; if source is count characters or longer, dest is NOT
 * NUL-terminated. Returns dest.
 */
char* strncpy(char* dest, const char* source, size_t count)
{
	char* start = dest;
	size_t remaining = count;

	/* Copy until the limit is hit or the terminator has been copied. */
	while (remaining != 0) {
		char c = *source++;
		*dest++ = c;
		if (c == '\0')
			break;
		remaining--;
	}

	/* Source ended early: pad the rest of the buffer with NUL bytes. */
	if (remaining != 0) {
		while (--remaining != 0)
			*dest++ = '\0';
	}

	return start;
}

A good solution is:

1. Ensure that the length of the string is the nominal length, not the virtual mark

2. Secondly, it is not allowed to cross the boundary. It must have the correct ending character ‘\ 0’

3. The length of DST must be at least one character longer than Src

A possible implementation is as follows: use safe_strncpy(), passing the destination buffer length so it can be checked for safety.

#define PATH_MAX 128
char file_path[PATH_MAX+1];

e.g. safe_strncpy(file_path, src,PATH_MAX+1, PATH_MAX);
/*
 * Bounded copy of at most str_size characters of src into dst, which holds
 * dst_size bytes. Guarantees dst is NUL-terminated on success.
 *
 * Returns 0 on success, -1 if dst/src is NULL or the buffer is not
 * strictly larger than str_size (no room for the terminator).
 *
 * Fixes vs. the original: the original returned the char* result of
 * strncpy() from an int function (a constraint violation), and only set
 * dst[dst_size-1] = '\0', leaving bytes between str_size and dst_size-1
 * uninitialized when src was long.
 */
int safe_strncpy(char *dst, const char *src, size_t dst_size, size_t str_size)
{
    if (dst == NULL || src == NULL || dst_size <= str_size)
        return -1;
    strncpy(dst, src, str_size);
    /* strncpy() does not terminate when src has >= str_size characters;
       force a terminator just past the copied region (in bounds because
       dst_size > str_size). */
    dst[str_size] = '\0';
    return 0;
}

The key is to make sure that the DST string is long enough.

It’s not very good to check at run time. The reason is that you have to test it before you can find the problem.

A better solution is to check the DST length during compilation.

GCC compile time checking scheme

GCC seems to have started to support compile time assertions in 4.3

_Static_assert(expr, "msg")

If the code is wrongly written and the DST space is equal to size, there is no terminator bug and an error should be reported.

In the following, the string file_path is meant to support 128 characters, but file_path's definition also allocates only 128 bytes; it should be defined with 129.

#define PATH_MAX 128
char file_path[PATH_MAX];

e.g. safe_strncpy(file_path, src, PATH_MAX);

The results are as follows

../include/comm.h:202:5: error: static assertion failed: "strncp small buf size error"
     _Static_assert( sizeof(dst) > size,"strncp small buf size error");\
     ^
cmd_mark.c:137:13: note: in expansion of macro ‘safe_strncpy’
             safe_strncpy(file_path, optarg, OS_PATH_MAX);

The code is as follows:

/*
 * Compile-time-checked copy: _Static_assert rejects, at compile time, any
 * call where sizeof(dst) is not strictly greater than size (i.e. no room
 * for the terminator), then forwards to the runtime helper _safe_strncpy()
 * with the real buffer size. The do { ... } while(0) wrapper makes the
 * macro usable as a single statement (e.g. inside an un-braced if).
 * NOTE(review): sizeof(dst) only measures the buffer when dst is a true
 * array at the call site; a pointer argument would yield sizeof(char*) —
 * confirm call sites pass arrays.
 */
#define safe_strncpy(dst,src,size)  \
do { \
    _Static_assert( sizeof(dst) > size,"strncp small buf size error");\
   _safe_strncpy(dst,src,sizeof(dst),size); \
        }while(0)

In this way, as long as the code compiles, every call to safe_strncpy() is guaranteed to have enough space.

The power of compile-time assertions lies in finding problems earlier than at run time. If someone misuses safe_strncpy(char *ptr, src, size), the problem is found at compile time, rather than at run time or even after release.

Pg_dump Error: pg_dump: No matching tables were found, pg_dump: schema with OID 1515227 does not exist

Use SQL query
to create functions and copy them directly

-- Reconstructs an approximate "CREATE TABLE ..." DDL statement for
-- in_schema_name.in_table_name by reading the system catalogs: column
-- definitions first, then table constraints, then the table's indexes.
-- Raises an exception when the table does not exist.
-- NOTE(review): this is a best-effort reconstruction (ordinary tables only,
-- relkind = 'r'), not a pg_dump-exact dump.
CREATE OR REPLACE FUNCTION public.show_create_table(
  in_schema_name varchar,
  in_table_name varchar
)
RETURNS text
LANGUAGE plpgsql VOLATILE
AS
$$
  DECLARE
    -- the ddl we're building
    v_table_ddl text;

    -- data about the target table
    v_table_oid int;

    -- records for looping
    v_column_record record;
    v_constraint_record record;
    v_index_record record;
  BEGIN
    -- grab the oid of the table; https://www.postgresql.org/docs/8.3/catalog-pg-class.html
    SELECT c.oid INTO v_table_oid
    FROM pg_catalog.pg_class c
    LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
    WHERE 1=1
      AND c.relkind = 'r' -- r = ordinary table; https://www.postgresql.org/docs/9.3/catalog-pg-class.html
      AND c.relname = in_table_name -- the table name
      AND n.nspname = in_schema_name; -- the schema

    -- throw an error if table was not found
    IF (v_table_oid IS NULL) THEN
      RAISE EXCEPTION 'table does not exist';
    END IF;

    -- start the create definition
    v_table_ddl := 'CREATE TABLE ' || in_schema_name || '.' || in_table_name || ' (' || E'\n';

    -- define all of the columns in the table; https://stackoverflow.com/a/8153081/3068233
    FOR v_column_record IN
      SELECT
        c.column_name,
        c.data_type,
        c.character_maximum_length,
        c.is_nullable,
        c.column_default
      FROM information_schema.columns c
      WHERE (table_schema, table_name) = (in_schema_name, in_table_name)
      ORDER BY ordinal_position
    LOOP
      v_table_ddl := v_table_ddl || '  ' -- note: two char spacer to start, to indent the column
        || v_column_record.column_name || ' '
        || v_column_record.data_type || CASE WHEN v_column_record.character_maximum_length IS NOT NULL THEN ('(' || v_column_record.character_maximum_length || ')') ELSE '' END || ' '
        || CASE WHEN v_column_record.is_nullable = 'NO' THEN 'NOT NULL' ELSE 'NULL' END
        || CASE WHEN v_column_record.column_default IS NOT null THEN (' DEFAULT ' || v_column_record.column_default) ELSE '' END
        || ',' || E'\n';
    END LOOP;

    -- define all the constraints in the; https://www.postgresql.org/docs/9.1/catalog-pg-constraint.html && https://dba.stackexchange.com/a/214877/75296
    FOR v_constraint_record IN
      SELECT
        con.conname as constraint_name,
        con.contype as constraint_type,
        CASE
          WHEN con.contype = 'p' THEN 1 -- primary key constraint
          WHEN con.contype = 'u' THEN 2 -- unique constraint
          WHEN con.contype = 'f' THEN 3 -- foreign key constraint
          WHEN con.contype = 'c' THEN 4 -- check constraint
          ELSE 5
        END as type_rank,
        pg_get_constraintdef(con.oid) as constraint_definition
      FROM pg_catalog.pg_constraint con
      JOIN pg_catalog.pg_class rel ON rel.oid = con.conrelid
      JOIN pg_catalog.pg_namespace nsp ON nsp.oid = connamespace -- connamespace resolves to con.connamespace
      WHERE nsp.nspname = in_schema_name
      AND rel.relname = in_table_name
      ORDER BY type_rank
    LOOP
      v_table_ddl := v_table_ddl || '  ' -- note: two char spacer to start, to indent the column
        || 'CONSTRAINT' || ' '
        || v_constraint_record.constraint_name || ' '
        || v_constraint_record.constraint_definition
        || ',' || E'\n';
    END LOOP;

    -- drop the last comma before ending the create statement
    -- (substr with start position 0 returns characters 1 .. length-2, i.e.
    -- trims the trailing "," and "\n"; plain "=" behaves like ":=" here)
    v_table_ddl = substr(v_table_ddl, 0, length(v_table_ddl) - 1) || E'\n';

    -- end the create definition
    v_table_ddl := v_table_ddl || ');' || E'\n';

    -- suffix create statement with all of the indexes on the table
    FOR v_index_record IN
      SELECT indexdef
      FROM pg_indexes
      WHERE (schemaname, tablename) = (in_schema_name, in_table_name)
    LOOP
      v_table_ddl := v_table_ddl
        || v_index_record.indexdef
        || ';' || E'\n';
    END LOOP;

    -- return the ddl
    RETURN v_table_ddl;
  END;
$$;

example

example:
SELECT * FROM public.show_create_table('public', 'example_table');

produce

CREATE TABLE public.example_table (
  id bigint NOT NULL DEFAULT nextval('test_tb_for_show_create_on_id_seq'::regclass),
  name character varying(150) NULL,
  level character varying(50) NULL,
  description text NOT NULL DEFAULT 'hello there!'::text,
  CONSTRAINT test_tb_for_show_create_on_pkey PRIMARY KEY (id),
  CONSTRAINT test_tb_for_show_create_on_level_check CHECK (((level)::text = ANY ((ARRAY['info'::character varying, 'warn'::character varying, 'error'::character varying])::text[])))
);
CREATE UNIQUE INDEX test_tb_for_show_create_on_pkey ON public.test_tb_for_show_create_on USING btree (id);

Stack overflow moved here. I’ve been looking for it for a long time. It’s easy to use

[Solved] Spark Writer tidb Error: read-uncommitted is not supported

Error Log:
Caused by: java.sql.SQLException: The isolation level ‘READ-UNCOMMITTED’ is not supported. Set tidb_skip_isolation_level_check=1 to skip this error

Solution:

// Configure the JDBC target for TiDB.
val tidb_url = "" //Fill in your own tidb url
val table_name = "aa" //the tidb table to insert
val properties = new Properties()
// Fix: the original put both setProperty calls on one line with no
// separator, which does not compile in Scala.
properties.setProperty("user", "username")
properties.setProperty("password", "123456")
// isolationLevel=NONE sidesteps TiDB's unsupported READ-UNCOMMITTED level.
sparkSession.sql("select * from table").write.mode(SaveMode.Append).option("isolationLevel","NONE").jdbc(tidb_url,table_name,properties)

How to Solve Error: The emulator process for AVD was killed.

How to solve the problem of the emulator process for AVD was killed.
my error report:

looking at the error report means that the process has been killed
trying to start from the command line

looking at the error report means that there is no more space
so it’s easy to do
Modify pixel.ini in the directory C:\Users\Administrator\.android\avd

it used to be C:\Users\Administrator\.android\avd\Pixel_2_API_28.avd
the screenshot is my configured directory

There’s enough space here
restart the simulator and start it immediately

Multi file decompression method GZ00 gz01 zip00 zip01 in windors Linux

Decompress multiple compressed files method: first merge, and then decompress

Windows:

Win + R, enter CMD, then enter the CMD terminal, CD to the compressed file directory, and then merge with the following command.

copy /B xxx.zip.001 + xxx.zip.002 + xxx.zip.003 1.zip

Decompress the generated 1. Zip

Note: the space and + sign in the middle

Linux: cat xxx.tar.gz0* > xxx.tar.gz (the original `>& gt;` was HTML-escaped; use a single `>` redirect, with a glob that does not match the output file)

Decompress the generated xxx.tar.gz:

tar -zxvpf xxx.tar.gz

Note: the compressed name corresponding to the transposition of XXX

ModuleNotFoundError: No module named ‘numpy.testing.nosetester‘

 File "/home/jsj201-6/anaconda3/lib/python3.6/site-packages/allennlp/data/data_loaders/multiprocess_data_loader.py", line 16, in <module>
    from allennlp.data.data_loaders.data_collator import DataCollator, DefaultDataCollator
  File "/home/jsj201-6/anaconda3/lib/python3.6/site-packages/allennlp/data/data_loaders/data_collator.py", line 3, in <module>
    from transformers.data.data_collator import DataCollatorForLanguageModeling
  File "/home/jsj201-6/anaconda3/lib/python3.6/site-packages/transformers/data/__init__.py", line 19, in <module>
    from .metrics import glue_compute_metrics, xnli_compute_metrics
  File "/home/jsj201-6/anaconda3/lib/python3.6/site-packages/transformers/data/metrics/__init__.py", line 23, in <module>
    from sklearn.metrics import f1_score, matthews_corrcoef
  File "/home/jsj201-6/anaconda3/lib/python3.6/site-packages/sklearn/metrics/__init__.py", line 7, in <module>
    from .ranking import auc
  File "/home/jsj201-6/anaconda3/lib/python3.6/site-packages/sklearn/metrics/ranking.py", line 25, in <module>
    from scipy.stats import rankdata
  File "/home/jsj201-6/anaconda3/lib/python3.6/site-packages/scipy/stats/__init__.py", line 348, in <module>
    from .stats import *
  File "/home/jsj201-6/anaconda3/lib/python3.6/site-packages/scipy/stats/stats.py", line 177, in <module>
    from . import distributions
  File "/home/jsj201-6/anaconda3/lib/python3.6/site-packages/scipy/stats/distributions.py", line 13, in <module>
    from . import _continuous_distns
  File "/home/jsj201-6/anaconda3/lib/python3.6/site-packages/scipy/stats/_continuous_distns.py", line 15, in <module>
    from scipy._lib._numpy_compat import broadcast_to
  File "/home/jsj201-6/anaconda3/lib/python3.6/site-packages/scipy/_lib/_numpy_compat.py", line 10, in <module>
    from numpy.testing.nosetester import import_nose
ModuleNotFoundError: No module named 'numpy.testing.nosetester'

terms of settlement:

Update the version of SciPy:

pip install numpy==1.18
pip install scipy==1.1.0
pip install scikit-learn==0.21.3

The problem is solved.