text Windows 下 Nginx 的 HTTPS 设置

Windows 下用 Nginx 配置 HTTPS 服务器

nginx-set-https-on-windows.txt
1、安装openssl:http://slproweb.com/products/Win32OpenSSL.html。

2、设置环境变量
a)新建变量:OPENSSL_HOME => C:\OpenSSL-Win64\bin;
b)Path变量添加:%OPENSSL_HOME%。

3、创建私钥
C:\nginx\ssl> openssl genrsa -des3 -out [name].key 1024
[name]为文件名,任意
输入密码,完成

4、创建csr证书
C:\nginx\ssl> openssl req -new -key [name].key -out [name].csr
填写信息,CommonName 输入网站域名,如localhost,xxx.yyy.zzz

5、创建指纹密码
复制 [name].key并重命名为 [name].key.org
C:\nginx\ssl> openssl rsa -in [name].key.org -out [name].key
输入密码,完成

6、生成crt证书
C:\nginx\ssl> openssl x509 -req -days 365 -in [name].csr -signkey [name].key -out [name].crt

7、修改nginx配置

# HTTPS server block: terminates TLS for this host on the standard port 443
server {
    listen       443 ssl;
    server_name  localhost;

    ssl_certificate      C://nginx//ssl//[name].crt;  # path to the certificate (.crt) file created in step 6
    ssl_certificate_key  C://nginx//ssl//[name].key;  # path to the private key (.key) file created in step 5

    # shared 1 MB TLS session cache, sessions reusable for 5 minutes
    ssl_session_cache    shared:SSL:1m;
    ssl_session_timeout  5m;

    # allow only strong cipher suites and let the server pick the order
    ssl_ciphers  HIGH:!aNULL:!MD5;
    ssl_prefer_server_ciphers  on;

    location / {
      root   html;                  # document root for the site
      index  index.html index.htm;  # default index file names
    }
}

8、nginx重载
C:\nginx> nginx.exe -s reload

9、浏览器访问https域名

参考链接:http://mobilesite.github.io/2017/03/11/windows-nginx-https-config/

text 这是在升级属性期间使用的字段列表

这是在升级属性期间使用的字段列表

magento2_upgrade_script_fields.txt
2018-05-10T07:56:38+00:00 INFO (6): Array
(
    [attribute_id] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => attribute_id
            [COLUMN_POSITION] => 1
            [DATA_TYPE] => smallint
            [DEFAULT] => 
            [NULLABLE] => 
            [LENGTH] => 
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 1
            [PRIMARY] => 1
            [PRIMARY_POSITION] => 1
            [IDENTITY] => 1
        )

    [entity_type_id] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => entity_type_id
            [COLUMN_POSITION] => 2
            [DATA_TYPE] => smallint
            [DEFAULT] => 0
            [NULLABLE] => 
            [LENGTH] => 
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 1
            [PRIMARY] => 
            [PRIMARY_POSITION] => 
            [IDENTITY] => 
        )

    [attribute_code] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => attribute_code
            [COLUMN_POSITION] => 3
            [DATA_TYPE] => varchar
            [DEFAULT] => 
            [NULLABLE] => 
            [LENGTH] => 255
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 
            [PRIMARY] => 
            [PRIMARY_POSITION] => 
            [IDENTITY] => 
        )

    [attribute_model] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => attribute_model
            [COLUMN_POSITION] => 4
            [DATA_TYPE] => varchar
            [DEFAULT] => 
            [NULLABLE] => 1
            [LENGTH] => 255
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 
            [PRIMARY] => 
            [PRIMARY_POSITION] => 
            [IDENTITY] => 
        )

    [backend_model] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => backend_model
            [COLUMN_POSITION] => 5
            [DATA_TYPE] => varchar
            [DEFAULT] => 
            [NULLABLE] => 1
            [LENGTH] => 255
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 
            [PRIMARY] => 
            [PRIMARY_POSITION] => 
            [IDENTITY] => 
        )

    [backend_type] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => backend_type
            [COLUMN_POSITION] => 6
            [DATA_TYPE] => varchar
            [DEFAULT] => static
            [NULLABLE] => 
            [LENGTH] => 8
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 
            [PRIMARY] => 
            [PRIMARY_POSITION] => 
            [IDENTITY] => 
        )

    [backend_table] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => backend_table
            [COLUMN_POSITION] => 7
            [DATA_TYPE] => varchar
            [DEFAULT] => 
            [NULLABLE] => 1
            [LENGTH] => 255
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 
            [PRIMARY] => 
            [PRIMARY_POSITION] => 
            [IDENTITY] => 
        )

    [frontend_model] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => frontend_model
            [COLUMN_POSITION] => 8
            [DATA_TYPE] => varchar
            [DEFAULT] => 
            [NULLABLE] => 1
            [LENGTH] => 255
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 
            [PRIMARY] => 
            [PRIMARY_POSITION] => 
            [IDENTITY] => 
        )

    [frontend_input] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => frontend_input
            [COLUMN_POSITION] => 9
            [DATA_TYPE] => varchar
            [DEFAULT] => 
            [NULLABLE] => 1
            [LENGTH] => 50
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 
            [PRIMARY] => 
            [PRIMARY_POSITION] => 
            [IDENTITY] => 
        )

    [frontend_label] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => frontend_label
            [COLUMN_POSITION] => 10
            [DATA_TYPE] => varchar
            [DEFAULT] => 
            [NULLABLE] => 1
            [LENGTH] => 255
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 
            [PRIMARY] => 
            [PRIMARY_POSITION] => 
            [IDENTITY] => 
        )

    [frontend_class] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => frontend_class
            [COLUMN_POSITION] => 11
            [DATA_TYPE] => varchar
            [DEFAULT] => 
            [NULLABLE] => 1
            [LENGTH] => 255
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 
            [PRIMARY] => 
            [PRIMARY_POSITION] => 
            [IDENTITY] => 
        )

    [source_model] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => source_model
            [COLUMN_POSITION] => 12
            [DATA_TYPE] => varchar
            [DEFAULT] => 
            [NULLABLE] => 1
            [LENGTH] => 255
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 
            [PRIMARY] => 
            [PRIMARY_POSITION] => 
            [IDENTITY] => 
        )

    [is_required] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => is_required
            [COLUMN_POSITION] => 13
            [DATA_TYPE] => smallint
            [DEFAULT] => 0
            [NULLABLE] => 
            [LENGTH] => 
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 1
            [PRIMARY] => 
            [PRIMARY_POSITION] => 
            [IDENTITY] => 
        )

    [is_user_defined] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => is_user_defined
            [COLUMN_POSITION] => 14
            [DATA_TYPE] => smallint
            [DEFAULT] => 0
            [NULLABLE] => 
            [LENGTH] => 
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 1
            [PRIMARY] => 
            [PRIMARY_POSITION] => 
            [IDENTITY] => 
        )

    [default_value] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => default_value
            [COLUMN_POSITION] => 15
            [DATA_TYPE] => text
            [DEFAULT] => 
            [NULLABLE] => 1
            [LENGTH] => 
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 
            [PRIMARY] => 
            [PRIMARY_POSITION] => 
            [IDENTITY] => 
        )

    [is_unique] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => is_unique
            [COLUMN_POSITION] => 16
            [DATA_TYPE] => smallint
            [DEFAULT] => 0
            [NULLABLE] => 
            [LENGTH] => 
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 1
            [PRIMARY] => 
            [PRIMARY_POSITION] => 
            [IDENTITY] => 
        )

    [note] => Array
        (
            [SCHEMA_NAME] => 
            [TABLE_NAME] => eav_attribute
            [COLUMN_NAME] => note
            [COLUMN_POSITION] => 17
            [DATA_TYPE] => varchar
            [DEFAULT] => 
            [NULLABLE] => 1
            [LENGTH] => 255
            [SCALE] => 
            [PRECISION] => 
            [UNSIGNED] => 
            [PRIMARY] => 
            [PRIMARY_POSITION] => 
            [IDENTITY] => 
        )

)

text 如何在Contao中添加新页面

How to Add a New Page in Contao
1 -  Login to contao and select Layout/Site Structure
2- Click on New Site
3- Add/Edit Articles of Page

text Алгоритм удаления показаний ИПУ

txt
1. Через Microsoft SQL Server Management Studio подключаемся к серверу grc3, выбираем базу sn_Saratov.Saratov.ApartmentCounterIndications

2. Через Mysql Workbench заходим в базу pc_db, выбираем таблицу pc_log_indication

3. находим в таблице pc_log_indication нужные показания и берем их counterid

4. Удаляем из Saratov.ApartmentCounterIndications необходимые счетчики

5. Удаляем записи из pc_log_indication

text Solicitud de blogs: solicitar un nuevo blog, autorización del CAU antes de crear

solicitud
Hay que pedir autorización. Quien los autoriza es Ramón: rrodriguez@csi.uned.es antes de crear
un blog necesitamos su autorización.



La creación de nuevos blogs ha de ser autorizada por el CAU. Para ello hay que cursar una petición de blogs a través del CAU, dirigiéndola a JAIME DARGALLO SAIZ, nosotros no tenemos capacidad de aprobación. A continuación te adjunto los pasos a seguir.

 
A la hora de solicitar el blog entre otros datos tendrás que proporcionar: tu nombre y apellidos. El nombre, apellidos y dirección de email de cada uno de los administradores que se van a encargar de gestionar y actualizar contenido del blog. El nombre completo del blog, y el identificador corto que servirá para acceder a la página desde internet. El identificador será un texto sin espacios del tipo 'cursocatedra', 'mooc' o 'filosofiasigloxx', mientras que el nombre completo del blog es el título que se le quiera dar la página, sin restricciones especiales.

 

1) Para crear un nuevo ticket del CAU, accede al portal UNED http://portal.uned.es/ . En la parte superior verás un botón  que dice CAU - Avisos

2) Al acceder selecciona la opción "Haga click aquí para acceder a la aplicación"

3) y por último podrás escribir tu solicitud haciendo click en "incidencia / consulta / petición"



Una vez lo aprueben, nos llegarán directamente las solicitudes correspondientes y nos pondremos en contacto contigo para entregarte las credenciales de acceso.

 

Un saludo y muchas gracias.

text 连接到没有dbcontext的数据库

connect to database without dbcontext
// Build DbContext options by hand so the context can be instantiated
// directly, without dependency injection or a configured provider.
var optionsBuilder = new DbContextOptionsBuilder<DickerDataContext>();
// NOTE(review): "connectstring" is a placeholder - substitute a real SQL Server connection string.
optionsBuilder.UseSqlServer("connectstring");
DickerDataContext dickerDataContext = new DickerDataContext(optionsBuilder.Options);

text 删除所有合并的分支到master / dev

del-merged-git
# Delete every local branch already merged into the current HEAD, except the
# current branch (the line starting with '*') and master/dev themselves.
# - grep -E replaces the deprecated egrep
# - anchoring with ^...$ stops "dev"/"master" from also sparing branches that
#   merely contain those substrings (e.g. "devops", "master-backup")
# - xargs -r skips running `git branch -d` when nothing matched
git branch --merged | grep -Ev '^\*|^[[:space:]]*(master|dev)$' | xargs -r git branch -d

text 在Android中使用PlaceAutoComplete Builder时出错的说明

在Android中使用PlaceAutoComplete Builder时出错的说明

autocompleteError.txt
// call Google Places Autocomplete
// Launches the Places Autocomplete UI as an overlay, filtered to the
// country "GH" (Ghana); the result arrives in onActivityResult below.
// NOTE(review): assumes PLACE_AUTOCOMPLETE_REQUEST_CODE is declared on the
// enclosing activity - confirm against the full class.
  try {
        AutocompleteFilter autocompleteFilter = new AutocompleteFilter.Builder()
                .setTypeFilter(Place.TYPE_COUNTRY)
                .setCountry("GH")
                .build();
        Intent intent =
                new PlaceAutocomplete.IntentBuilder(PlaceAutocomplete.MODE_OVERLAY)
                        .setFilter(autocompleteFilter)
                        .build(AddressActivity.this);
        startActivityForResult(intent, PLACE_AUTOCOMPLETE_REQUEST_CODE);
    } catch (GooglePlayServicesRepairableException e) {
        // TODO: Handle the error.
    } catch (GooglePlayServicesNotAvailableException e) {
        // TODO: Handle the error.
    }
    
    //onActivity Result
    // Receives the outcome of the autocomplete overlay started above.
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        Log.e("Done "+ requestCode,"Requesting "+ resultCode);
        if (requestCode == PLACE_AUTOCOMPLETE_REQUEST_CODE) {
            if (resultCode == RESULT_OK) {
                // User picked a place: display its name in the 'office' field.
                Place place = PlaceAutocomplete.getPlace(this, data);
                office.setText(place.getName());
            } else if (resultCode == PlaceAutocomplete.RESULT_ERROR) {
                // NOTE(review): getStatusMessage() can return null, which would
                // make Log.i throw - confirm and guard in the real activity.
                Status status = PlaceAutocomplete.getStatus(this, data);
                Log.i("Address", status.getStatusMessage());
            }
        }

    }

text 这个要点的描述

这个要点的描述

file1.txt
String file contents

text 谷歌应用程序脚本的刮刀维基代码

谷歌应用程序脚本的刮刀维基代码

scraperwiki.gs
/** @description
 * get data from scraperwiki into google apps script
 * See http://ramblings.mcpher.com/Home/excelquirks/codeuse for more details
 * @author <a href="mailto:bruce@mcpher.com">Bruce McPherson</a><a href="http://ramblings.mcpher.com"> ramblings.mcpher.com</a>
 */

/**
 * swSeewhatworks see which scraperwikis have tables and update a list (as returned by rest entry scraperwiki) with default sql
 * @param {string} ws the worksheet name with the scraperwiki list of shortnames
 * @return {void} null
 */
/**
 * swSeewhatworks see which scraperwikis have tables and update a list
 * (as returned by rest entry scraperwiki) with default sql
 * @param {string} ws the worksheet name with the scraperwiki list of shortnames
 * @return {void} null
 */
function swSeewhatworks(ws) {
    var dataSet = new cDataSet().populateData(wholeSheet(ws),
        undefined, undefined, undefined, undefined, undefined, true);
    var cache = sheetCache(dataSet.headingRow().where());

    // for each shortname row, write its default sql in the column
    // immediately after the last populated one
    dataSet.rows().forEach(function (row) {
        var defaultSql = swGetDefaultTableSql(row.cell("short_name").toString(), false);
        cache.setValue(defaultSql, row.where().getRow(), row.columns().count() + 1);
    });
    cache.close();
}
/**
 * swGetTables return the cRest result of query for table names
 * @param {string} shortName the scraperWiki key
 * @return {cRest} the result of the query for table names
 */
/**
 * swGetTables return the cRest result of query for table names
 * @param {string} shortName the scraperWiki key
 * @return {cRest} the result of the query for table names
 */
function swGetTables(shortName){
    // list every table and view known to sqlite, permanent and temporary,
    // skipping sqlite's own internal tables
    var tableDirectory = [
        "SELECT name FROM sqlite_master",
        "WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%'",
        "Union all",
        "SELECT name FROM sqlite_temp_master",
        "WHERE type IN ('table','view')",
        "ORDER BY 1"
    ].join(" ");

    return restQuery(undefined, "scraperwikidata",
        shortName + "&query=" + tableDirectory,
        undefined, undefined, undefined, undefined, false);
}
/**
 * swGetDefaultTableSql look up to see what tables are defined in a given scraperwiki and return sql to get the first one
 * @param {string} shortName the scraperWiki key
 * @param {boolean} optComplain whether to complain if there is a problem
 * @return {string} the sql query to get data from first table
 */
/**
 * swGetDefaultTableSql look up to see what tables are defined in a given
 * scraperwiki and return sql to get the first one
 * @param {string} shortName the scraperWiki key
 * @param {boolean} optComplain whether to complain if there is a problem
 * @return {string} the sql query to get data from first table, or "" on failure
 */
function swGetDefaultTableSql(shortName, optComplain){
    var complain = fixOptional (optComplain,true);
    var restResult = swGetTables(shortName);
    if (!restResult) {
        MsgBox ("could get no info on " + shortName);
        return "";
    }

    var firstObject = restResult.jObjects().count() ? restResult.jObjects().item(1) : null;
    if (firstObject && firstObject.hasChildren()) {
        // this is hokey - for the moment just take from the first table found
        return "select * from '" +
                firstObject.children(1).child("name").toString() + "'";
    }

    DebugPrint(shortName," did not return any tables: got this:", restResult.responseData());
    if (complain) {
        MsgBox ("could not find any valid tables for " +
            shortName + "(" + (firstObject ? firstObject.serialize() : "no data")  + ")");
    }
    return "";
}
/**
 * swCleanSheet create a clean results sheet with column headings
 * @param {cJobject} job contains the list of columns headings as keys
 * @param {string} ws the worksheet name to populate
 * @return {cDataSet} the dataset with the headings populated
 */
/**
 * swCleanSheet create a clean results sheet with column headings
 * @param {cJobject} job contains the list of columns headings as keys
 * @param {string} ws the worksheet name to populate
 * @return {cDataSet} the dataset with the headings populated, or null if job is empty
 */
function swCleanSheet(job, ws) {
    var resultSet = null;
    var cache = sheetCache(ws);
    cache.clear();

    if (job.hasChildren()) {
        // each key becomes a column heading on row 1
        job.children().forEach(function (headingJob, columnIndex) {
            cache.setValue(headingJob.key(), 1, columnIndex);
        });
        resultSet = new cDataSet().populateData(
            vResize(wholeSheet(ws), 1, job.children().count()));
    }
    cache.commit();
    return resultSet;
}
/**
 * swGetHeaders organize what headers are needed given the scraperWIki response
 * @param {cJobject} job contains the query response
 * @return {cJobject} a jobject with a list of keys for column headings
 */
/**
 * swGetHeaders organize what headers are needed given the scraperWiki response
 * @param {cJobject} job contains the query response
 * @return {cJobject} a jobject with a list of keys for column headings
 */
function swGetHeaders(job) {
    // rows may carry different fields - accumulate every key seen
    var headerKeys = new cJobject().init(null);
    job.children().forEach(function (row) {
        row.children().forEach(function (field) {
            headerKeys.add(field.key());
        });
    });
    return headerKeys;
}
/**
 * scraperWikiStuff do the query and populate the data
 * @param {string} shortName the scraperwiki key
 * @param {string} ws the worksheet name to populate
 * @param {string} optSql the optional sql string to get the data
 * @param {number} optLimit the optional limit to number of rows to get
 * @return {cDataSet} the finished data
 */
/**
 * scraperWikiStuff do the query and populate the data
 * @param {string} shortName the scraperwiki key
 * @param {string} ws the worksheet name to populate
 * @param {string} optSql the optional sql string to get the data
 * @param {number} optLimit the optional limit to number of rows to get
 * @return {cDataSet} the finished data, or null if the query failed
 */
function scraperWikiStuff(shortName, ws , optSql, optLimit) {
    // sort out the optional args
    var sql = fixOptional (optSql, swGetDefaultTableSql(shortName));
    var limit = IsMissing(optLimit) ? "" : "&limit=" + CStr(optLimit);
    var ds = null;
    // get the data
    var cr = restQuery(undefined, "scraperwikidata", 
       shortName + "&query=" + sql + limit,undefined ,undefined ,undefined ,undefined , false);

    //now organize it
    if(cr) {
       // get the unique headers and put them to a clean data set
       var crj = cr.jObject();
       var headJob = swGetHeaders(crj);
       if (!headJob) {
            MsgBox ("didnt work at all " + crj.serialize());
       }
       else {
            ds = swCleanSheet(headJob, ws);
            if (!ds) {
                // BUG FIX: original used VB-style '&' concatenation, which in
                // JavaScript is bitwise AND and rendered the message as a number
                MsgBox ("failed to get the expected data " + crj.serialize());
            }
            else {
                var cache = sheetCache(ds.headingRow().where());
                var r = ds.headingRow().where().getRow();
                // we know how big the cache needs to be so do it once off
                cache.extend(crj.children().count()+1, ds.columns().count());
                 // this is the data returned - each array member is a row
                 crj.children().forEach(
                   function (cj,rIndex) {
                     cj.children().forEach (
                       function (job,cIndex) {
                         cache.setValue(job.value(), r + rIndex, cIndex);
                       }
                     );
                   }
                 );
                 cache.close();
            }
        }
     }
     
     return ds;
}